diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index c1c29780a47..676eec104b1 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -166,7 +166,7 @@ public class CommonStats implements Streamable, ToXContent { completion = indexShard.completionStats(flags.completionDataFields()); break; case Segments: - segments = indexShard.segmentStats(); + segments = indexShard.segmentStats(flags.includeSegmentFileSizes()); break; case Percolate: percolate = indexShard.percolateStats(); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java index fb306337886..39608c72a15 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStatsFlags.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -38,6 +39,7 @@ public class CommonStatsFlags implements Streamable, Cloneable { private String[] groups = null; private String[] fieldDataFields = null; private String[] completionDataFields = null; + private boolean includeSegmentFileSizes = false; /** @@ -62,6 +64,7 @@ public class CommonStatsFlags implements Streamable, Cloneable { groups = null; fieldDataFields = null; completionDataFields = null; + includeSegmentFileSizes = false; return this; } @@ -74,6 +77,7 @@ public class CommonStatsFlags implements Streamable, Cloneable { groups = null; fieldDataFields = null; completionDataFields = null; + includeSegmentFileSizes = false; return this; } @@ -137,6 +141,15 @@ public class CommonStatsFlags implements Streamable, Cloneable { return this.completionDataFields; } + public CommonStatsFlags includeSegmentFileSizes(boolean includeSegmentFileSizes) { + this.includeSegmentFileSizes = includeSegmentFileSizes; + return this; + } + + public boolean includeSegmentFileSizes() { + return this.includeSegmentFileSizes; + } + public boolean isSet(Flag flag) { return flags.contains(flag); } @@ -177,6 +190,9 @@ public class CommonStatsFlags implements Streamable, Cloneable { out.writeStringArrayNullable(groups); out.writeStringArrayNullable(fieldDataFields); out.writeStringArrayNullable(completionDataFields); + if (out.getVersion().onOrAfter(Version.V_5_0_0)) { + out.writeBoolean(includeSegmentFileSizes); + } } @Override @@ -192,6 +208,11 @@ public class CommonStatsFlags implements Streamable, Cloneable { groups = in.readStringArray(); fieldDataFields = in.readStringArray(); completionDataFields = in.readStringArray(); + if (in.getVersion().onOrAfter(Version.V_5_0_0)) { + includeSegmentFileSizes = in.readBoolean(); + } else { + includeSegmentFileSizes = false; + } } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java index 792dde1f45b..3a74d896fba 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java @@ -265,6 +265,15 @@ public class IndicesStatsRequest extends BroadcastRequest { return flags.isSet(Flag.Recovery); } + public boolean includeSegmentFileSizes() { + return flags.includeSegmentFileSizes(); + } + + public IndicesStatsRequest includeSegmentFileSizes(boolean includeSegmentFileSizes) { + flags.includeSegmentFileSizes(includeSegmentFileSizes); + return this; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java index 21540d31529..0ae21a3ac09 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java @@ -166,4 +166,9 @@ public class IndicesStatsRequestBuilder extends BroadcastOperationRequestBuilder request.recovery(recovery); return this; } + + public IndicesStatsRequestBuilder setIncludeSegmentFileSizes(boolean includeSegmentFileSizes) { + request.includeSegmentFileSizes(includeSegmentFileSizes); + return this; + } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 4bed5f918a4..2189973d9b7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -144,6 +144,7 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< } if (request.segments()) { flags.set(CommonStatsFlags.Flag.Segments); + flags.includeSegmentFileSizes(request.includeSegmentFileSizes()); } if (request.completion()) { flags.set(CommonStatsFlags.Flag.Completion); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 4b53ee25e0c..707bf8de57f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -60,9 +60,11 @@ import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.LongSupplier; /** * @@ -74,27 +76,41 @@ public class TransportBulkAction extends HandledTransportAction listener) { - final long startTime = System.currentTimeMillis(); + final long startTime = relativeTime(); final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); - if (autoCreateIndex.needToCheck()) { + if (needToCheck()) { // Keep track of all unique indices and all unique types per index for the create index requests: final Map> indicesAndTypes = new HashMap<>(); for (ActionRequest request : bulkRequest.requests) { @@ -113,7 +129,7 @@ public class TransportBulkAction extends HandledTransportAction> entry : indicesAndTypes.entrySet()) { final String index = entry.getKey(); - if (autoCreateIndex.shouldAutoCreate(index, state)) { + if (shouldAutoCreate(index, state)) { 
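// the bulk request references an index that does not exist yet and is
// eligible for auto-creation, so a create-index request is issued for it
// before the bulk itself executes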
CreateIndexRequest createIndexRequest = new CreateIndexRequest(); createIndexRequest.index(index); for (String type : entry.getValue()) { @@ -164,6 +180,14 @@ public class TransportBulkAction extends HandledTransportAction responses, int idx, ActionRequest request, String index, Throwable e) { if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; @@ -196,15 +220,15 @@ public class TransportBulkAction extends HandledTransportAction listener) { - final long startTimeNanos = System.nanoTime(); + final long startTimeNanos = relativeTime(); executeBulk(bulkRequest, startTimeNanos, listener, new AtomicArray<>(bulkRequest.requests.size())); } private long buildTookInMillis(long startTimeNanos) { - return TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNanos); + return TimeUnit.NANOSECONDS.toMillis(relativeTime() - startTimeNanos); } - private void executeBulk(final BulkRequest bulkRequest, final long startTimeNanos, final ActionListener listener, final AtomicArray responses ) { + void executeBulk(final BulkRequest bulkRequest, final long startTimeNanos, final ActionListener listener, final AtomicArray responses ) { final ClusterState clusterState = clusterService.state(); // TODO use timeout to wait here if its blocked... clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE); @@ -398,7 +422,6 @@ public class TransportBulkAction extends HandledTransportAction listener) { + AsyncAction(Task task, NodesRequest request, ActionListener listener) { this.task = task; this.request = request; this.listener = listener; @@ -135,7 +135,7 @@ public abstract class TransportNodesAction(this.nodesIds.length); } - private void start() { + void start() { if (nodesIds.length == 0) { // nothing to notify threadPool.generic().execute(new Runnable() { @@ -158,11 +158,6 @@ public abstract class TransportNodesAction ENFORCE_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( - TransportSettings.BIND_HOST, - TransportSettings.HOST, - TransportSettings.PUBLISH_HOST, - NetworkService.GLOBAL_NETWORK_HOST_SETTING, - NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING, - NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING - ))); - - private static boolean enforceLimits(Settings settings) { - if (Build.CURRENT.isSnapshot()) { - return false; - } - for (Setting setting : ENFORCE_SETTINGS) { - if (setting.exists(settings)) { - return true; - } - } - return false; - } - - static void enforceOrLogLimits(Settings settings) { // pkg private for testing - /* We enforce limits once any network host is configured. In this case we assume the node is running in production - * and all production limit checks must pass. 
This should be extended as we go to settings like: - * - discovery.zen.minimum_master_nodes - * - discovery.zen.ping.unicast.hosts is set if we use zen disco - * - ensure we can write in all data directories - * - fail if mlockall failed and was configured - * - fail if vm.max_map_count is under a certain limit (not sure if this works cross platform) - * - fail if the default cluster.name is used, if this is setup on network a real clustername should be used?*/ - final boolean enforceLimits = enforceLimits(settings); - final ESLogger logger = Loggers.getLogger(Bootstrap.class); - final long maxFileDescriptorCount = ProcessProbe.getInstance().getMaxFileDescriptorCount(); - if (maxFileDescriptorCount != -1) { - final int fileDescriptorCountThreshold = (1 << 16); - if (maxFileDescriptorCount < fileDescriptorCountThreshold) { - if (enforceLimits){ - throw new IllegalStateException("max file descriptors [" + maxFileDescriptorCount - + "] for elasticsearch process likely too low, increase it to at least [" + fileDescriptorCountThreshold +"]"); - } - logger.warn( - "max file descriptors [{}] for elasticsearch process likely too low, consider increasing to at least [{}]", - maxFileDescriptorCount, fileDescriptorCountThreshold); - } - } - } } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java new file mode 100644 index 00000000000..6ac3c477fd7 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java @@ -0,0 +1,252 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.apache.lucene.util.Constants; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.monitor.process.ProcessProbe; +import org.elasticsearch.transport.TransportSettings; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +/** + * We enforce limits once any network host is configured. In this case we assume the node is running in production + * and all production limit checks must pass. 
This should be extended as we go to settings like: + * - discovery.zen.minimum_master_nodes + * - discovery.zen.ping.unicast.hosts is set if we use zen disco + * - ensure we can write in all data directories + * - fail if vm.max_map_count is under a certain limit (not sure if this works cross platform) + * - fail if the default cluster.name is used, if this is setup on network a real clustername should be used? + */ +final class BootstrapCheck { + + private BootstrapCheck() { + } + + /** + * checks the current limits against the snapshot or release build + * checks + * + * @param settings the current node settings + */ + public static void check(final Settings settings) { + check(enforceLimits(settings), checks(settings)); + } + + /** + * executes the provided checks and fails the node if + * enforceLimits is true, otherwise logs warnings + * + * @param enforceLimits true if the checks should be enforced or + * warned + * @param checks the checks to execute + */ + // visible for testing + static void check(final boolean enforceLimits, final List checks) { + final ESLogger logger = Loggers.getLogger(BootstrapCheck.class); + + for (final Check check : checks) { + final boolean fail = check.check(); + if (fail) { + if (enforceLimits) { + throw new RuntimeException(check.errorMessage()); + } else { + logger.warn(check.errorMessage()); + } + } + } + } + + /** + * The set of settings such that if any are set for the node, then + * the checks are enforced + * + * @return the enforcement settings + */ + // visible for testing + static Set enforceSettings() { + return Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + TransportSettings.BIND_HOST, + TransportSettings.HOST, + TransportSettings.PUBLISH_HOST, + NetworkService.GLOBAL_NETWORK_HOST_SETTING, + NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING, + NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING + ))); + } + + /** + * Tests if the checks should be enforced + * + * @param settings the current node settings + * @return true if the checks should be enforced + */ + // visible for testing + static boolean enforceLimits(final Settings settings) { + return enforceSettings().stream().anyMatch(s -> s.exists(settings)); + } + + // the list of checks to execute + private static List checks(final Settings settings) { + final List checks = new ArrayList<>(); + final FileDescriptorCheck fileDescriptorCheck + = Constants.MAC_OS_X ? 
new OsXFileDescriptorCheck() : new FileDescriptorCheck(); + checks.add(fileDescriptorCheck); + checks.add(new MlockallCheck(BootstrapSettings.MLOCKALL_SETTING.get(settings))); + if (Constants.LINUX) { + checks.add(new MaxNumberOfThreadsCheck()); + } + return Collections.unmodifiableList(checks); + } + + /** + * Encapsulates a limit check + */ + interface Check { + + /** + * test if the node fails the check + * + * @return true if the node failed the check + */ + boolean check(); + + /** + * the message for a failed check + * + * @return the error message on check failure + */ + String errorMessage(); + + } + + static class OsXFileDescriptorCheck extends FileDescriptorCheck { + + public OsXFileDescriptorCheck() { + // see constant OPEN_MAX defined in + // /usr/include/sys/syslimits.h on OS X and its use in JVM + // initialization in int os:init_2(void) defined in the JVM + // code for BSD (contains OS X) + super(10240); + } + + } + + // visible for testing + static class FileDescriptorCheck implements Check { + + private final int limit; + + FileDescriptorCheck() { + this(1 << 16); + } + + protected FileDescriptorCheck(final int limit) { + if (limit <= 0) { + throw new IllegalArgumentException("limit must be positive but was [" + limit + "]"); + } + this.limit = limit; + } + + public final boolean check() { + final long maxFileDescriptorCount = getMaxFileDescriptorCount(); + return maxFileDescriptorCount != -1 && maxFileDescriptorCount < limit; + } + + @Override + public final String errorMessage() { + return String.format( + Locale.ROOT, + "max file descriptors [%d] for elasticsearch process likely too low, increase to at least [%d]", + getMaxFileDescriptorCount(), + limit + ); + } + + // visible for testing + long getMaxFileDescriptorCount() { + return ProcessProbe.getInstance().getMaxFileDescriptorCount(); + } + + } + + // visible for testing + static class MlockallCheck implements Check { + + private final boolean mlockallSet; + + public MlockallCheck(final boolean mlockAllSet) { + this.mlockallSet = mlockAllSet; + } + + @Override + public boolean check() { + return mlockallSet && !isMemoryLocked(); + } + + @Override + public String errorMessage() { + return "memory locking requested for elasticsearch process but memory is not locked"; + } + + // visible for testing + boolean isMemoryLocked() { + return Natives.isMemoryLocked(); + } + + } + + static class MaxNumberOfThreadsCheck implements Check { + + private final long maxNumberOfThreadsThreshold = 1 << 15; + + @Override + public boolean check() { + return getMaxNumberOfThreads() != -1 && getMaxNumberOfThreads() < maxNumberOfThreadsThreshold; + } + + @Override + public String errorMessage() { + return String.format( + Locale.ROOT, + "max number of threads [%d] for user [%s] likely too low, increase to at least [%d]", + getMaxNumberOfThreads(), + BootstrapInfo.getSystemProperties().get("user.name"), + maxNumberOfThreadsThreshold); + } + + // visible for testing + long getMaxNumberOfThreads() { + return JNANatives.MAX_NUMBER_OF_THREADS; + } + + } + +} diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 64b685c4150..b9d5ce11dbc 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -48,6 +48,9 @@ class JNANatives { // Set to true, in case policy can be applied to all threads of the process (even existing ones) // otherwise they are only inherited for new 
threads (ES app threads) static boolean LOCAL_SECCOMP_ALL = false; + // set to the maximum number of threads that can be created for + // the user ID that owns the running Elasticsearch process + static long MAX_NUMBER_OF_THREADS = -1; static void tryMlockall() { int errno = Integer.MIN_VALUE; @@ -103,13 +106,29 @@ class JNANatives { } } + static void trySetMaxNumberOfThreads() { + if (Constants.LINUX) { + // this is only valid on Linux and the value *is* different on OS X + // see /usr/include/sys/resource.h on OS X + // on Linux the resource RLIMIT_NPROC means *the number of threads* + // this is in opposition to BSD-derived OSes + final int rlimit_nproc = 6; + + final JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit(); + if (JNACLibrary.getrlimit(rlimit_nproc, rlimit) == 0) { + MAX_NUMBER_OF_THREADS = rlimit.rlim_cur.longValue(); + } else { + logger.warn("unable to retrieve max number of threads [" + JNACLibrary.strerror(Native.getLastError()) + "]"); + } + } + } + static String rlimitToString(long value) { assert Constants.LINUX || Constants.MAC_OS_X; if (value == JNACLibrary.RLIM_INFINITY) { return "unlimited"; } else { - // TODO, on java 8 use Long.toUnsignedString, since that's what it is. - return Long.toString(value); + return Long.toUnsignedString(value); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 44f2f4000bd..03506643eeb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -209,16 +209,6 @@ public class DiscoveryNode implements Streamable, ToXContent { this.version = version; } - /** - * Should this node form a connection to the provided node. - */ - public boolean shouldConnectTo(DiscoveryNode otherNode) { - if (clientNode() && otherNode.clientNode()) { - return false; - } - return true; - } - /** * The address that the node can be communicated with. 
*/ diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index af5c6a2f44b..84a7130e334 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -574,9 +574,6 @@ public class InternalClusterService extends AbstractLifecycleComponent listeners = new CopyOnWriteArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 41942d71578..dbf1644cbfe 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -34,7 +34,7 @@ public abstract class ESLoggerFactory { public static final Setting LOG_DEFAULT_LEVEL_SETTING = new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, SettingsProperty.ClusterScope); public static final Setting LOG_LEVEL_SETTING = - Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, + Setting.prefixKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, SettingsProperty.Dynamic, SettingsProperty.ClusterScope); public static ESLogger getLogger(String prefix, String name) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index c3ea39a06a2..e0e03b18e12 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -42,6 +42,7 @@ import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity.SimScorer; import org.apache.lucene.search.similarities.Similarity.SimWeight; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.SmallFloat; import org.apache.lucene.util.ToStringUtils; import java.io.IOException; @@ -186,9 +187,13 @@ public final class AllTermQuery extends Query { float boost; if (payload == null) { boost = 1; - } else { - assert payload.length == 4; + } else if (payload.length == 1) { + boost = SmallFloat.byte315ToFloat(payload.bytes[payload.offset]); + } else if (payload.length == 4) { + // TODO: for bw compat only, remove this in 6.0 boost = PayloadHelper.decodeFloat(payload.bytes, payload.offset); + } else { + throw new IllegalStateException("Payloads are expected to have a length of 1 or 4 but got: " + payload); } payloadBoost += boost; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTokenStream.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTokenStream.java index 076cbacef1e..2a4db83d429 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTokenStream.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTokenStream.java @@ -25,11 +25,10 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.SmallFloat; import java.io.IOException; -import static org.apache.lucene.analysis.payloads.PayloadHelper.encodeFloat; - /** * */ @@ -39,7 +38,7 @@ public final class AllTokenStream extends TokenFilter { return new 
AllTokenStream(analyzer.tokenStream(allFieldName, allEntries), allEntries); } - private final BytesRef payloadSpare = new BytesRef(new byte[4]); + private final BytesRef payloadSpare = new BytesRef(new byte[1]); private final AllEntries allEntries; @@ -64,7 +63,7 @@ public final class AllTokenStream extends TokenFilter { } final float boost = allEntries.boost(offsetAttribute.startOffset()); if (boost != 1.0f) { - encodeFloat(boost, payloadSpare.bytes, payloadSpare.offset); + payloadSpare.bytes[0] = SmallFloat.floatToByte315(boost); payloadAttribute.setPayload(payloadSpare); } else { payloadAttribute.setPayload(null); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 0aab078d4ef..83de725a83a 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -76,7 +76,7 @@ public class FilterableTermsEnum extends TermsEnum { this.docsEnumFlag = docsEnumFlag; if (filter == null) { // Important - need to use the doc count that includes deleted docs - // or we have this issue: https://github.com/elasticsearch/elasticsearch/issues/7951 + // or we have this issue: https://github.com/elastic/elasticsearch/issues/7951 numDocs = reader.maxDoc(); } List leaves = reader.leaves(); diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index b8569b9eff0..63ee81c30b7 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -296,12 +296,25 @@ public abstract class AbstractScopedSettings extends AbstractComponent { } for (Map.Entry> entry : complexMatchers.entrySet()) { if (entry.getValue().match(key)) { + assert assertMatcher(key, 1); return entry.getValue().getConcreteSetting(key); } } return null; } + private boolean assertMatcher(String key, int numComplexMatchers) { + List> list = new ArrayList<>(); + for (Map.Entry> entry : complexMatchers.entrySet()) { + if (entry.getValue().match(key)) { + list.add(entry.getValue().getConcreteSetting(key)); + } + } + assert list.size() == numComplexMatchers : "Expected " + numComplexMatchers + " complex matchers to match key [" + + key + "] but got: " + list.toString(); + return true; + } + /** * Returns true if the setting for the given key is dynamically updateable. Otherwise false. */ diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 394180d27f5..12adef7b4bb 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -113,7 +113,7 @@ public class Setting extends ToXContentToBytes { private static final ESLogger logger = Loggers.getLogger(Setting.class); private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); - private final String key; + private final Key key; protected final Function defaultValue; private final Function parser; private final EnumSet properties; @@ -125,7 +125,7 @@ public class Setting extends ToXContentToBytes { * @param parser a parser that parses the string rep into a complex datatype. 
* @param properties properties for this setting like scope, filtering... */ - public Setting(String key, Function defaultValue, Function parser, SettingsProperty... properties) { + public Setting(Key key, Function defaultValue, Function parser, SettingsProperty... properties) { assert parser.apply(defaultValue.apply(Settings.EMPTY)) != null || this.isGroupSetting(): "parser returned null"; this.key = key; this.defaultValue = defaultValue; @@ -160,6 +160,18 @@ public class Setting extends ToXContentToBytes { this(key, s -> defaultValue, parser, properties); } + /** + * Creates a new Setting instance + * @param key the settings key for this setting. + * @param defaultValue a default value function that returns the default values string representation. + * @param parser a parser that parses the string rep into a complex datatype. + * @param dynamic true iff this setting can be dynamically updateable + * @param scope the scope of this setting + */ + public Setting(String key, Function defaultValue, Function parser, boolean dynamic, Scope scope) { + this(new SimpleKey(key), defaultValue, parser, dynamic, scope); + } + /** * Creates a new Setting instance * @param key the settings key for this setting. @@ -179,6 +191,13 @@ public class Setting extends ToXContentToBytes { * @see #isGroupSetting() */ public final String getKey() { + return key.toString(); + } + + /** + * Returns the original representation of a setting key. + */ + public final Key getRawKey() { return key; } @@ -265,7 +284,7 @@ public class Setting extends ToXContentToBytes { * Returns true iff this setting is present in the given settings object. Otherwise false */ public final boolean exists(Settings settings) { - return settings.get(key) != null; + return settings.get(getKey()) != null; } /** @@ -298,7 +317,7 @@ public class Setting extends ToXContentToBytes { deprecationLogger.deprecated("[{}] setting was deprecated in Elasticsearch and it will be removed in a future release! " + "See the breaking changes lists in the documentation for details", getKey()); } - return settings.get(key, defaultValue.apply(settings)); + return settings.get(getKey(), defaultValue.apply(settings)); } /** @@ -306,14 +325,14 @@ public class Setting extends ToXContentToBytes { * given key is part of the settings group. * @see #isGroupSetting() */ - public boolean match(String toTest) { - return key.equals(toTest); + public final boolean match(String toTest) { + return key.match(toTest); } @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("key", key); + builder.field("key", key.toString()); builder.field("properties", properties); builder.field("is_group_setting", isGroupSetting()); builder.field("default", defaultValue.apply(Settings.EMPTY)); @@ -486,6 +505,14 @@ public class Setting extends ToXContentToBytes { return value; } + public static TimeValue parseTimeValue(String s, TimeValue minValue, String key) { + TimeValue timeValue = TimeValue.parseTimeValue(s, null, key); + if (timeValue.millis() < minValue.millis()) { + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return timeValue; + } + public static Setting intSetting(String key, int defaultValue, SettingsProperty... 
properties) { return intSetting(key, defaultValue, Integer.MIN_VALUE, properties); } @@ -535,20 +562,15 @@ public class Setting extends ToXContentToBytes { Function> parser = (s) -> parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList()); - return new Setting>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, - properties) { + return new Setting>(new ListKey(key), + (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, properties) { private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); @Override public String getRaw(Settings settings) { - String[] array = settings.getAsArray(key, null); + String[] array = settings.getAsArray(getKey(), null); return array == null ? defaultValue.apply(settings) : arrayToParsableString(array); } - @Override - public boolean match(String toTest) { - return pattern.matcher(toTest).matches(); - } - @Override boolean hasComplexMatcher() { return true; @@ -591,11 +613,12 @@ public class Setting extends ToXContentToBytes { } public static Setting groupSetting(String key, SettingsProperty... properties) { + // TODO CHECK IF WE REMOVE if (key.endsWith(".") == false) { throw new IllegalArgumentException("key must end with a '.'"); } - return new Setting(key, "", (s) -> null, properties) { - + // TODO CHECK IF WE REMOVE -END + return new Setting(new GroupKey(key), (s) -> "", (s) -> null, properties) { @Override public boolean isGroupSetting() { return true; @@ -603,12 +626,7 @@ public class Setting extends ToXContentToBytes { @Override public Settings get(Settings settings) { - return settings.getByPrefix(key); - } - - @Override - public boolean match(String toTest) { - return Regex.simpleMatch(key + "*", toTest); + return settings.getByPrefix(getKey()); } @Override @@ -701,11 +719,28 @@ public class Setting extends ToXContentToBytes { /** * This setting type allows to validate settings that have the same type and a common prefix. For instance feature.${type}=[true|false] - * can easily be added with this setting. Yet, dynamic key settings don't support updaters out of the box unless {@link #getConcreteSetting(String)} - * is used to pull the updater. + * can easily be added with this setting. Yet, prefix key settings don't support updaters out of the box unless + * {@link #getConcreteSetting(String)} is used to pull the updater. */ - public static Setting dynamicKeySetting(String key, String defaultValue, Function parser, - SettingsProperty... properties) { + public static Setting prefixKeySetting(String prefix, String defaultValue, Function parser, boolean dynamic, Scope scope) { + return affixKeySetting(AffixKey.withPrefix(prefix), (s) -> defaultValue, parser, dynamic, scope); + } + + /** + * This setting type allows to validate settings that have the same type and a common prefix and suffix. For instance + * storage.${backend}.enable=[true|false] can easily be added with this setting. Yet, adfix key settings don't support updaters + * out of the box unless {@link #getConcreteSetting(String)} is used to pull the updater. 
+ */ + public static Setting adfixKeySetting(String prefix, String suffix, Function defaultValue, Function parser, boolean dynamic, Scope scope) { + return affixKeySetting(AffixKey.withAdfix(prefix, suffix), defaultValue, parser, dynamic, scope); + } + + public static Setting adfixKeySetting(String prefix, String suffix, String defaultValue, Function parser, boolean dynamic, Scope scope) { + return adfixKeySetting(prefix, suffix, (s) -> defaultValue, parser, dynamic, scope); + } + + public static Setting affixKeySetting(AffixKey key, Function defaultValue, Function parser, + SettingsProperty... properties) { return new Setting(key, defaultValue, parser, properties) { @Override @@ -713,14 +748,9 @@ public class Setting extends ToXContentToBytes { return true; } - @Override - public boolean match(String toTest) { - return toTest.startsWith(getKey()); - } - @Override AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer validator) { - throw new UnsupportedOperationException("dynamic settings can't be updated use #getConcreteSetting for updating"); + throw new UnsupportedOperationException("Affix settings can't be updated. Use #getConcreteSetting for updating."); } @Override @@ -728,9 +758,145 @@ public class Setting extends ToXContentToBytes { if (match(key)) { return new Setting<>(key, defaultValue, parser, properties); } else { - throw new IllegalArgumentException("key must match setting but didn't ["+key +"]"); + throw new IllegalArgumentException("key [" + key + "] must match [" + getKey() + "] but didn't."); } } }; } + + + public interface Key { + boolean match(String key); + } + + public static class SimpleKey implements Key { + protected final String key; + + public SimpleKey(String key) { + this.key = key; + } + + @Override + public boolean match(String key) { + return this.key.equals(key); + } + + @Override + public String toString() { + return key; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SimpleKey simpleKey = (SimpleKey) o; + return Objects.equals(key, simpleKey.key); + } + + @Override + public int hashCode() { + return Objects.hash(key); + } + } + + public static final class GroupKey extends SimpleKey { + public GroupKey(String key) { + super(key); + if (key.endsWith(".") == false) { + throw new IllegalArgumentException("key must end with a '.'"); + } + } + + @Override + public boolean match(String toTest) { + return Regex.simpleMatch(key + "*", toTest); + } + } + + public static final class ListKey extends SimpleKey { + private final Pattern pattern; + + public ListKey(String key) { + super(key); + this.pattern = Pattern.compile(Pattern.quote(key) + "(\\.\\d+)?"); + } + + @Override + public boolean match(String toTest) { + return pattern.matcher(toTest).matches(); + } + } + + public static final class AffixKey implements Key { + public static AffixKey withPrefix(String prefix) { + return new AffixKey(prefix, null); + } + + public static AffixKey withAdfix(String prefix, String suffix) { + return new AffixKey(prefix, suffix); + } + + private final String prefix; + private final String suffix; + + public AffixKey(String prefix, String suffix) { + assert prefix != null || suffix != null: "Either prefix or suffix must be non-null"; + this.prefix = prefix; + this.suffix = suffix; + } + + @Override + public boolean match(String key) { + boolean match = true; + if (prefix != null) { + match = key.startsWith(prefix); + } + if (suffix != null) { 
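// for example, a key built with AffixKey.withAdfix("index.store.", "enabled"):
//   match("index.store.fs.enabled") -> true  (prefix and suffix both match)
//   match("index.store.fs.type")    -> false (suffix does not match)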
+ match = match && key.endsWith(suffix); + } + return match; + } + + public SimpleKey toConcreteKey(String missingPart) { + StringBuilder key = new StringBuilder(); + if (prefix != null) { + key.append(prefix); + } + key.append(missingPart); + if (suffix != null) { + key.append("."); + key.append(suffix); + } + return new SimpleKey(key.toString()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + if (prefix != null) { + sb.append(prefix); + } + if (suffix != null) { + sb.append("*"); + sb.append(suffix); + sb.append("."); + } + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AffixKey that = (AffixKey) o; + return Objects.equals(prefix, that.prefix) && + Objects.equals(suffix, that.suffix); + } + + @Override + public int hashCode() { + return Objects.hash(prefix, suffix); + } + } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 0e112118da8..3c5583440e0 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -22,11 +22,13 @@ package org.elasticsearch.index.engine; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.SnapshotDeletionPolicy; @@ -36,12 +38,15 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherManager; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -64,8 +69,11 @@ import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import java.io.Closeable; +import java.io.FileNotFoundException; import java.io.IOException; +import java.nio.file.NoSuchFileException; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.List; @@ -406,7 +414,7 @@ public abstract class Engine implements Closeable { /** * Global stats on segments. 
*/ - public final SegmentsStats segmentsStats() { + public final SegmentsStats segmentsStats(boolean includeSegmentFileSizes) { ensureOpen(); try (final Searcher searcher = acquireSearcher("segments_stats")) { SegmentsStats stats = new SegmentsStats(); @@ -418,12 +426,81 @@ public abstract class Engine implements Closeable { stats.addTermVectorsMemoryInBytes(guardedRamBytesUsed(segmentReader.getTermVectorsReader())); stats.addNormsMemoryInBytes(guardedRamBytesUsed(segmentReader.getNormsReader())); stats.addDocValuesMemoryInBytes(guardedRamBytesUsed(segmentReader.getDocValuesReader())); + + if (includeSegmentFileSizes) { + // TODO: consider moving this to StoreStats + stats.addFileSizes(getSegmentFileSizes(segmentReader)); + } } writerSegmentStats(stats); return stats; } } + private ImmutableOpenMap getSegmentFileSizes(SegmentReader segmentReader) { + Directory directory = null; + SegmentCommitInfo segmentCommitInfo = segmentReader.getSegmentInfo(); + boolean useCompoundFile = segmentCommitInfo.info.getUseCompoundFile(); + if (useCompoundFile) { + try { + directory = engineConfig.getCodec().compoundFormat().getCompoundReader(segmentReader.directory(), segmentCommitInfo.info, IOContext.READ); + } catch (IOException e) { + logger.warn("Error when opening compound reader for Directory [{}] and SegmentCommitInfo [{}]", e, + segmentReader.directory(), segmentCommitInfo); + + return ImmutableOpenMap.of(); + } + } else { + directory = segmentReader.directory(); + } + + assert directory != null; + + String[] files; + if (useCompoundFile) { + try { + files = directory.listAll(); + } catch (IOException e) { + logger.warn("Couldn't list Compound Reader Directory [{}]", e, directory); + return ImmutableOpenMap.of(); + } + } else { + try { + files = segmentReader.getSegmentInfo().files().toArray(new String[]{}); + } catch (IOException e) { + logger.warn("Couldn't list Directory from SegmentReader [{}] and SegmentInfo [{}]", e, segmentReader, segmentReader.getSegmentInfo()); + return ImmutableOpenMap.of(); + } + } + + ImmutableOpenMap.Builder map = ImmutableOpenMap.builder(); + for (String file : files) { + String extension = IndexFileNames.getExtension(file); + long length = 0L; + try { + length = directory.fileLength(file); + } catch (NoSuchFileException | FileNotFoundException e) { + logger.warn("Tried to query fileLength but file is gone [{}] [{}]", e, directory, file); + } catch (IOException e) { + logger.warn("Error when trying to query fileLength [{}] [{}]", e, directory, file); + } + if (length == 0L) { + continue; + } + map.put(extension, length); + } + + if (useCompoundFile && directory != null) { + try { + directory.close(); + } catch (IOException e) { + logger.warn("Error when closing compound reader on Directory [{}]", e, directory); + } + } + + return map.build(); + } + protected void writerSegmentStats(SegmentsStats stats) { // by default we don't have a writer here... 
subclasses can override this stats.addVersionMapMemoryInBytes(0); diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index dd023363984..c412ce3b85f 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -928,12 +928,6 @@ public class InternalEngine extends Engine { iwc.setSimilarity(engineConfig.getSimilarity()); iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().mbFrac()); iwc.setCodec(engineConfig.getCodec()); - /* We set this timeout to a highish value to work around - * the default poll interval in the Lucene lock that is - * 1000ms by default. We might need to poll multiple times - * here but with 1s poll this is only executed twice at most - * in combination with the default writelock timeout*/ - iwc.setWriteLockTimeout(5000); iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh // Warm-up hook for newly-merged segments. Warming up segments here is better since it will be performed at the end // of the merge operation and won't slow down _refresh diff --git a/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java b/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java index 229750e840e..d2c4a3c14c0 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java +++ b/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java @@ -19,6 +19,10 @@ package org.elasticsearch.index.engine; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + +import org.elasticsearch.Version; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -28,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import java.io.IOException; +import java.util.Iterator; public class SegmentsStats implements Streamable, ToXContent { @@ -42,6 +47,33 @@ public class SegmentsStats implements Streamable, ToXContent { private long indexWriterMaxMemoryInBytes; private long versionMapMemoryInBytes; private long bitsetMemoryInBytes; + private ImmutableOpenMap fileSizes = ImmutableOpenMap.of(); + + /* + * A map to provide a best-effort approach describing Lucene index files. 
+ * + * Ideally this should be in sync to what the current version of Lucene is using, but it's harmless to leave extensions out, + * they'll just miss a proper description in the stats + */ + private static ImmutableOpenMap fileDescriptions = ImmutableOpenMap.builder() + .fPut("si", "Segment Info") + .fPut("fnm", "Fields") + .fPut("fdx", "Field Index") + .fPut("fdt", "Field Data") + .fPut("tim", "Term Dictionary") + .fPut("tip", "Term Index") + .fPut("doc", "Frequencies") + .fPut("pos", "Positions") + .fPut("pay", "Payloads") + .fPut("nvd", "Norms") + .fPut("nvm", "Norms") + .fPut("dvd", "DocValues") + .fPut("dvm", "DocValues") + .fPut("tvx", "Term Vector Index") + .fPut("tvd", "Term Vector Documents") + .fPut("tvf", "Term Vector Fields") + .fPut("liv", "Live Documents") + .build(); public SegmentsStats() {} @@ -49,7 +81,7 @@ public class SegmentsStats implements Streamable, ToXContent { this.count += count; this.memoryInBytes += memoryInBytes; } - + public void addTermsMemoryInBytes(long termsMemoryInBytes) { this.termsMemoryInBytes += termsMemoryInBytes; } @@ -86,6 +118,22 @@ public class SegmentsStats implements Streamable, ToXContent { this.bitsetMemoryInBytes += bitsetMemoryInBytes; } + public void addFileSizes(ImmutableOpenMap fileSizes) { + ImmutableOpenMap.Builder map = ImmutableOpenMap.builder(this.fileSizes); + + for (Iterator> it = fileSizes.iterator(); it.hasNext();) { + ObjectObjectCursor entry = it.next(); + if (map.containsKey(entry.key)) { + Long oldValue = map.get(entry.key); + map.put(entry.key, oldValue + entry.value); + } else { + map.put(entry.key, entry.value); + } + } + + this.fileSizes = map.build(); + } + public void add(SegmentsStats mergeStats) { if (mergeStats == null) { return; @@ -100,6 +148,7 @@ public class SegmentsStats implements Streamable, ToXContent { addIndexWriterMaxMemoryInBytes(mergeStats.indexWriterMaxMemoryInBytes); addVersionMapMemoryInBytes(mergeStats.versionMapMemoryInBytes); addBitsetMemoryInBytes(mergeStats.bitsetMemoryInBytes); + addFileSizes(mergeStats.fileSizes); } /** @@ -219,6 +268,10 @@ public class SegmentsStats implements Streamable, ToXContent { return new ByteSizeValue(bitsetMemoryInBytes); } + public ImmutableOpenMap getFileSizes() { + return fileSizes; + } + public static SegmentsStats readSegmentsStats(StreamInput in) throws IOException { SegmentsStats stats = new SegmentsStats(); stats.readFrom(in); @@ -239,6 +292,15 @@ public class SegmentsStats implements Streamable, ToXContent { builder.byteSizeField(Fields.INDEX_WRITER_MAX_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MAX_MEMORY, indexWriterMaxMemoryInBytes); builder.byteSizeField(Fields.VERSION_MAP_MEMORY_IN_BYTES, Fields.VERSION_MAP_MEMORY, versionMapMemoryInBytes); builder.byteSizeField(Fields.FIXED_BIT_SET_MEMORY_IN_BYTES, Fields.FIXED_BIT_SET, bitsetMemoryInBytes); + builder.startObject(Fields.FILE_SIZES); + for (Iterator> it = fileSizes.iterator(); it.hasNext();) { + ObjectObjectCursor entry = it.next(); + builder.startObject(entry.key); + builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, entry.value); + builder.field(Fields.DESCRIPTION, fileDescriptions.getOrDefault(entry.key, "Others")); + builder.endObject(); + } + builder.endObject(); builder.endObject(); return builder; } @@ -266,6 +328,10 @@ public class SegmentsStats implements Streamable, ToXContent { static final XContentBuilderString VERSION_MAP_MEMORY_IN_BYTES = new XContentBuilderString("version_map_memory_in_bytes"); static final XContentBuilderString FIXED_BIT_SET = new 
XContentBuilderString("fixed_bit_set"); static final XContentBuilderString FIXED_BIT_SET_MEMORY_IN_BYTES = new XContentBuilderString("fixed_bit_set_memory_in_bytes"); + static final XContentBuilderString FILE_SIZES = new XContentBuilderString("file_sizes"); + static final XContentBuilderString SIZE = new XContentBuilderString("size"); + static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes"); + static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description"); } @Override @@ -281,6 +347,19 @@ public class SegmentsStats implements Streamable, ToXContent { versionMapMemoryInBytes = in.readLong(); indexWriterMaxMemoryInBytes = in.readLong(); bitsetMemoryInBytes = in.readLong(); + + if (in.getVersion().onOrAfter(Version.V_5_0_0)) { + int size = in.readVInt(); + ImmutableOpenMap.Builder map = ImmutableOpenMap.builder(size); + for (int i = 0; i < size; i++) { + String key = in.readString(); + Long value = in.readLong(); + map.put(key, value); + } + fileSizes = map.build(); + } else { + fileSizes = ImmutableOpenMap.of(); + } } @Override @@ -296,5 +375,14 @@ public class SegmentsStats implements Streamable, ToXContent { out.writeLong(versionMapMemoryInBytes); out.writeLong(indexWriterMaxMemoryInBytes); out.writeLong(bitsetMemoryInBytes); + + if (out.getVersion().onOrAfter(Version.V_5_0_0)) { + out.writeVInt(fileSizes.size()); + for (Iterator> it = fileSizes.iterator(); it.hasNext();) { + ObjectObjectCursor entry = it.next(); + out.writeString(entry.key); + out.writeLong(entry.value); + } + } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 6e0865fcc0b..78b05117104 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -510,17 +510,17 @@ class DocumentParser implements Closeable { private static Mapper.Builder createBuilderFromFieldType(final ParseContext context, MappedFieldType fieldType, String currentFieldName) { Mapper.Builder builder = null; if (fieldType instanceof StringFieldType) { - builder = context.root().findTemplateBuilder(context, currentFieldName, "string"); + builder = context.root().findTemplateBuilder(context, currentFieldName, "string", "string"); if (builder == null) { builder = new StringFieldMapper.Builder(currentFieldName); } } else if (fieldType instanceof TextFieldType) { - builder = context.root().findTemplateBuilder(context, currentFieldName, "string"); + builder = context.root().findTemplateBuilder(context, currentFieldName, "text", "string"); if (builder == null) { builder = new TextFieldMapper.Builder(currentFieldName); } } else if (fieldType instanceof KeywordFieldType) { - builder = context.root().findTemplateBuilder(context, currentFieldName, "string"); + builder = context.root().findTemplateBuilder(context, currentFieldName, "keyword", "string"); if (builder == null) { builder = new KeywordFieldMapper.Builder(currentFieldName); } @@ -568,7 +568,7 @@ class DocumentParser implements Closeable { // we need to do it here so we can handle things like attachment templates, where calling // text (to see if its a date) causes the binary value to be cleared { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null); + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "text", null); if (builder != null) { 
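// a dynamic template matched this field; return its builder instead of
// falling through to the default type detection below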
return builder; } @@ -617,7 +617,7 @@ class DocumentParser implements Closeable { } Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string"); if (builder == null) { - builder = new StringFieldMapper.Builder(currentFieldName); + builder = new TextFieldMapper.Builder(currentFieldName); } return builder; } else if (token == XContentParser.Token.VALUE_NUMBER) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index f5a36b4de19..199de497c5c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -132,6 +132,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { + if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) { + throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] " + + "or [keyword] field instead for field [" + fieldName + "]"); + } StringFieldMapper.Builder builder = new StringFieldMapper.Builder(fieldName); // hack for the fact that string can't just accept true/false for // the index property and still accepts no/not_analyzed/analyzed @@ -236,6 +240,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc int positionIncrementGap, int ignoreAbove, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); + if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) { + throw new IllegalArgumentException("The [string] type is removed in 5.0. 
You should now use either a [text] " + + "or [keyword] field instead for field [" + fieldType.name() + "]"); + } if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values"); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 69d5f60f114..f881d206f0c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -20,8 +20,6 @@ package org.elasticsearch.index.mapper.geo; import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.spatial.util.GeoHashUtils; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; @@ -40,9 +38,8 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import java.io.IOException; @@ -148,7 +145,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr public abstract Y build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, - StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); + KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); public Y build(Mapper.BuilderContext context) { GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType; @@ -168,11 +165,10 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType()); } - StringFieldMapper geoHashMapper = null; + KeywordFieldMapper geoHashMapper = null; if (enableGeoHash || enableGeoHashPrefix) { // TODO: possible also implicitly enable geohash if geohash precision is set - geoHashMapper = new StringFieldMapper.Builder(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).store(fieldType.stored()) - .omitNorms(true).indexOptions(IndexOptions.DOCS).build(context); + geoHashMapper = new KeywordFieldMapper.Builder(Names.GEOHASH).index(true).includeInAll(false).store(fieldType.stored()).build(context); geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix); } context.path().remove(); @@ -349,12 +345,12 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr protected DoubleFieldMapper lonMapper; - protected StringFieldMapper geoHashMapper; + protected KeywordFieldMapper geoHashMapper; protected Explicit ignoreMalformed; 
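// geoHashMapper is now a KeywordFieldMapper: the geohash sub-field was
// previously built as a not-analyzed StringFieldMapper with norms omitted,
// which the [keyword] type replaces directly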
protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, + DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); this.latMapper = latMapper; @@ -507,7 +503,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override public FieldMapper updateFieldType(Map fullNameToFieldType) { BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType); - StringFieldMapper geoUpdated = geoHashMapper == null ? null : (StringFieldMapper) geoHashMapper.updateFieldType(fullNameToFieldType); + KeywordFieldMapper geoUpdated = geoHashMapper == null ? null : (KeywordFieldMapper) geoHashMapper.updateFieldType(fullNameToFieldType); DoubleFieldMapper latUpdated = latMapper == null ? null : (DoubleFieldMapper) latMapper.updateFieldType(fullNameToFieldType); DoubleFieldMapper lonUpdated = lonMapper == null ? null : (DoubleFieldMapper) lonMapper.updateFieldType(fullNameToFieldType); if (updated == this diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 9c8b4a1f468..0d84cf21812 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -33,7 +33,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import java.io.IOException; import java.util.Map; @@ -79,7 +79,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { @Override public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, - DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, + DoubleFieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); if (context.indexCreatedVersion().before(Version.V_2_3_0)) { @@ -110,7 +110,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, - StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { + KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java 
b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index dcd57a42dc5..7e952c2bb2c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -24,7 +24,6 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoDistance; @@ -40,8 +39,8 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import java.io.IOException; @@ -110,7 +109,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement @Override public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, - DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, + DoubleFieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); setupFieldType(context); @@ -268,7 +267,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, - StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, + KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, Explicit coerce, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index 5e87130865c..00de61acdb6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -235,10 +235,30 @@ public class RootObjectMapper extends ObjectMapper { return dynamicDateTimeFormatters; } - public Mapper.Builder findTemplateBuilder(ParseContext context, String name, String dynamicType) { - return findTemplateBuilder(context, name, dynamicType, dynamicType); + public Mapper.Builder findTemplateBuilder(ParseContext context, String name, String matchType) { + final String dynamicType; + switch (matchType) { + case "string": + // string is a corner case since a json string can either map to a + // text or keyword field in elasticsearch. For now we use text when + // unspecified. 
For other types, the mapping type matches the json + // type so we are fine + dynamicType = "text"; + break; + default: + dynamicType = matchType; + break; + } + return findTemplateBuilder(context, name, dynamicType, matchType); } + /** + * Find a template. Returns {@code null} if no template could be found. + * @param name the field name + * @param dynamicType the field type to give the field if the template does not define one + * @param matchType the type of the field in the json document or null if unknown + * @return a mapper builder, or null if there is no template for such a field + */ public Mapper.Builder findTemplateBuilder(ParseContext context, String name, String dynamicType, String matchType) { DynamicTemplate dynamicTemplate = findTemplate(context.path(), name, matchType); if (dynamicTemplate == null) { diff --git a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java index 281bc061170..da26fa90b35 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java @@ -132,7 +132,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder parentTypes = new HashSet<>(5); - parentTypes.add(parentDocMapper.type()); + Set childTypes = new HashSet<>(); ParentChildIndexFieldData parentChildIndexFieldData = null; for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) { ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper(); - if (parentFieldMapper.active()) { - DocumentMapper parentTypeDocumentMapper = context.getMapperService().documentMapper(parentFieldMapper.type()); + if (parentFieldMapper.active() && type.equals(parentFieldMapper.type())) { + childTypes.add(documentMapper.type()); parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType()); - if (parentTypeDocumentMapper == null) { - // Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent. 
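The has_parent rewrite in this hunk (the deletions continue below) inverts the old approach: instead of negating a filter built from all parent types, it now collects exactly the child types whose _parent mapping points at the queried type, and fails fast when none exist. A hedged usage sketch; the type and field names are assumptions:

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class HasParentSketch {
    // Assumes a mapping in which type [blog_tag] declares "_parent": { "type": "blog" }.
    static QueryBuilder tagsOfKimchysBlogs() {
        return QueryBuilders.hasParentQuery(
                "blog",                                       // the parent type the inner query runs against
                QueryBuilders.termQuery("author", "kimchy")); // matched against parent documents
    }
    // If no type maps [blog] as its _parent, the shard-level rewrite now throws
    // QueryShardException ("[has_parent] no child types found for type [blog]")
    // instead of silently matching nothing.
}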
- parentTypes.add(parentFieldMapper.type()); - } } } - if (parentChildIndexFieldData == null) { - throw new QueryShardException(context, "[has_parent] no _parent field configured"); - } - Query parentTypeQuery = null; - if (parentTypes.size() == 1) { - DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypes.iterator().next()); - if (documentMapper != null) { - parentTypeQuery = documentMapper.typeFilter(); - } + if (childTypes.isEmpty()) { + throw new QueryShardException(context, "[" + NAME + "] no child types found for type [" + type + "]"); + } + + Query childrenQuery; + if (childTypes.size() == 1) { + DocumentMapper documentMapper = context.getMapperService().documentMapper(childTypes.iterator().next()); + childrenQuery = documentMapper.typeFilter(); } else { - BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder(); - for (String parentTypeStr : parentTypes) { - DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypeStr); - if (documentMapper != null) { - parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD); - } + BooleanQuery.Builder childrenFilter = new BooleanQuery.Builder(); + for (String childrenTypeStr : childTypes) { + DocumentMapper documentMapper = context.getMapperService().documentMapper(childrenTypeStr); + childrenFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD); } - parentTypeQuery = parentsFilter.build(); - } - - if (parentTypeQuery == null) { - return null; + childrenQuery = childrenFilter.build(); } // wrap the query with type query innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter()); - Query childrenFilter = Queries.not(parentTypeQuery); - return new HasChildQueryBuilder.LateParsingQuery(childrenFilter, + return new HasChildQueryBuilder.LateParsingQuery(childrenQuery, innerQuery, HasChildQueryBuilder.DEFAULT_MIN_CHILDREN, HasChildQueryBuilder.DEFAULT_MAX_CHILDREN, diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 14b1278e51b..4d35755e159 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -630,8 +630,8 @@ public class IndexShard extends AbstractIndexShardComponent { return engine.getMergeStats(); } - public SegmentsStats segmentStats() { - SegmentsStats segmentsStats = getEngine().segmentsStats(); + public SegmentsStats segmentStats(boolean includeSegmentFileSizes) { + SegmentsStats segmentsStats = getEngine().segmentsStats(includeSegmentFileSizes); segmentsStats.addBitsetMemoryInBytes(shardBitsetFilterCache.getMemorySizeInBytes()); return segmentsStats; } diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index 604ac1b2d1d..330787a68a3 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -926,13 +926,6 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements } Store.verify(indexOutput); indexOutput.close(); - // write the checksum - if (fileInfo.metadata().hasLegacyChecksum()) { - Store.LegacyChecksums legacyChecksums = new Store.LegacyChecksums(); - legacyChecksums.add(fileInfo.metadata()); - 
legacyChecksums.write(store); - - } store.directory().sync(Collections.singleton(fileInfo.physicalName())); success = true; } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java index 6452778964b..08af2843ce4 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java @@ -180,7 +180,6 @@ public class BlobStoreIndexShardSnapshot implements ToXContent, FromXContentBuil * * @return file checksum */ - @Nullable public String checksum() { return metadata.checksum(); } diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index 4c452d15928..ac84fd76179 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -29,8 +29,10 @@ import org.apache.lucene.store.NativeFSLockFactory; import org.apache.lucene.store.RateLimitedFSDirectory; import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.store.SimpleFSLockFactory; +import org.apache.lucene.store.SleepingLockWrapper; import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.Constants; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.settings.Setting; @@ -87,10 +89,12 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim final Path location = path.resolveIndex(); Files.createDirectories(location); Directory wrapped = newFSDirectory(location, indexSettings.getValue(INDEX_LOCK_FACTOR_SETTING)); + if (IndexMetaData.isOnSharedFilesystem(indexSettings.getSettings())) { + wrapped = new SleepingLockWrapper(wrapped, 5000); + } return new RateLimitedFSDirectory(wrapped, this, this) ; } - @Override public void onPause(long nanos) { rateLimitingTimeInNanos.inc(nanos); diff --git a/core/src/main/java/org/elasticsearch/index/store/LegacyVerification.java b/core/src/main/java/org/elasticsearch/index/store/LegacyVerification.java deleted file mode 100644 index d2b0a2815e9..00000000000 --- a/core/src/main/java/org/elasticsearch/index/store/LegacyVerification.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.store; - -import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.store.BufferedChecksum; -import org.apache.lucene.store.IndexOutput; - -import java.io.IOException; -import java.util.zip.Adler32; -import java.util.zip.Checksum; - -/** - * Implements verification checks to the best extent possible - * against legacy segments. - *
<p>
- * For files since ES 1.3, we have a lucene checksum, and - * we verify both CRC32 + length from that. - * For older segment files, we have an elasticsearch Adler32 checksum - * and a length, except for commit points. - * For older commit points, we only have the length in metadata, - * but lucene always wrote a CRC32 checksum we can verify in the future, too. - * For (Jurassic?) files, we dont have an Adler32 checksum at all, - * since its optional in the protocol. But we always know the length. - * @deprecated only to support old segments - */ -@Deprecated -class LegacyVerification { - - // TODO: add a verifier for old lucene segments_N that also checks CRC. - // but for now, at least truncation is detected here (as length will be checked) - - /** - * verifies Adler32 + length for index files before lucene 4.8 - */ - static class Adler32VerifyingIndexOutput extends VerifyingIndexOutput { - final String adler32; - final long length; - final Checksum checksum = new BufferedChecksum(new Adler32()); - long written; - - public Adler32VerifyingIndexOutput(IndexOutput out, String adler32, long length) { - super(out); - this.adler32 = adler32; - this.length = length; - } - - @Override - public void verify() throws IOException { - if (written != length) { - throw new CorruptIndexException("expected length=" + length + " != actual length: " + written + " : file truncated?", out.toString()); - } - final String actualChecksum = Store.digestToString(checksum.getValue()); - if (!adler32.equals(actualChecksum)) { - throw new CorruptIndexException("checksum failed (hardware problem?) : expected=" + adler32 + - " actual=" + actualChecksum, out.toString()); - } - } - - @Override - public void writeByte(byte b) throws IOException { - out.writeByte(b); - checksum.update(b); - written++; - } - - @Override - public void writeBytes(byte[] bytes, int offset, int length) throws IOException { - out.writeBytes(bytes, offset, length); - checksum.update(bytes, offset, length); - written += length; - } - } - - /** - * verifies length for index files before lucene 4.8 - */ - static class LengthVerifyingIndexOutput extends VerifyingIndexOutput { - final long length; - long written; - - public LengthVerifyingIndexOutput(IndexOutput out, long length) { - super(out); - this.length = length; - } - - @Override - public void verify() throws IOException { - if (written != length) { - throw new CorruptIndexException("expected length=" + length + " != actual length: " + written + " : file truncated?", out.toString()); - } - } - - @Override - public void writeByte(byte b) throws IOException { - out.writeByte(b); - written++; - } - - @Override - public void writeBytes(byte[] bytes, int offset, int length) throws IOException { - out.writeBytes(bytes, offset, length); - written += length; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index a0cc8f11419..f467d25c594 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -50,7 +50,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -71,7 +70,6 @@ import 
org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.RefCounted; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.env.ShardLock; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.AbstractIndexShardComponent; @@ -84,7 +82,6 @@ import java.io.IOException; import java.io.InputStream; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.sql.Time; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -122,8 +119,6 @@ import static java.util.Collections.unmodifiableMap; * */ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted { - private static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_4_8_0; - static final String CODEC = "store"; static final int VERSION_WRITE_THROWABLE= 2; // we write throwable since 2.0 static final int VERSION_STACK_TRACE = 1; // we write the stack trace too since 1.4.0 @@ -152,7 +147,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref this(shardId, indexSettings, directoryService, shardLock, OnClose.EMPTY); } - @Inject public Store(ShardId shardId, IndexSettings indexSettings, DirectoryService directoryService, ShardLock shardLock, OnClose onClose) throws IOException { super(shardId, indexSettings); final Settings settings = indexSettings.getSettings(); @@ -462,19 +456,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref IndexOutput output = directory().createOutput(fileName, context); boolean success = false; try { - if (metadata.hasLegacyChecksum()) { - logger.debug("create legacy adler32 output for {}", fileName); - output = new LegacyVerification.Adler32VerifyingIndexOutput(output, metadata.checksum(), metadata.length()); - } else if (metadata.checksum() == null) { - // TODO: when the file is a segments_N, we can still CRC-32 + length for more safety - // its had that checksum forever. 
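With the Adler32 and length-only branches removed here (the deleted lines continue below), every store file is expected to carry a Lucene 4.8+ footer checksum, so verification collapses to a single code path. A minimal sketch of what that uniform check amounts to; the Directory argument and file name are illustrative assumptions:

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

class FooterChecksumSketch {
    static String footerChecksum(Directory directory) throws Exception {
        try (IndexInput in = directory.openInput("_0.cfs", IOContext.READONCE)) {
            // Recomputes the CRC32 over the whole file and validates it against the
            // footer Lucene wrote, throwing CorruptIndexException on a mismatch.
            long checksum = CodecUtil.checksumEntireFile(in);
            // Store.digestToString renders this as Long.toString(checksum, Character.MAX_RADIX)
            // before comparing it with the checksum recorded in the metadata snapshot.
            return Long.toString(checksum, Character.MAX_RADIX);
        }
    }
}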
- logger.debug("create legacy length-only output for {}", fileName); - output = new LegacyVerification.LengthVerifyingIndexOutput(output, metadata.length()); - } else { - assert metadata.writtenBy() != null; - assert metadata.writtenBy().onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION); - output = new LuceneVerifyingIndexOutput(metadata, output); - } + assert metadata.writtenBy() != null; + assert metadata.writtenBy().onOrAfter(StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION); + output = new LuceneVerifyingIndexOutput(metadata, output); success = true; } finally { if (success == false) { @@ -491,12 +475,8 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref } public IndexInput openVerifyingInput(String filename, IOContext context, StoreFileMetaData metadata) throws IOException { - if (metadata.hasLegacyChecksum() || metadata.checksum() == null) { - logger.debug("open legacy input for {}", filename); - return directory().openInput(filename, context); - } assert metadata.writtenBy() != null; - assert metadata.writtenBy().onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION); + assert metadata.writtenBy().onOrAfter(StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION); return new VerifyingIndexInput(directory().openInput(filename, context)); } @@ -524,32 +504,12 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref if (input.length() != md.length()) { // first check the length no matter how old this file is throw new CorruptIndexException("expected length=" + md.length() + " != actual length: " + input.length() + " : file truncated?", input); } - if (md.writtenBy() != null && md.writtenBy().onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION)) { - // throw exception if the file is corrupt - String checksum = Store.digestToString(CodecUtil.checksumEntireFile(input)); - // throw exception if metadata is inconsistent - if (!checksum.equals(md.checksum())) { - throw new CorruptIndexException("inconsistent metadata: lucene checksum=" + checksum + - ", metadata checksum=" + md.checksum(), input); - } - } else if (md.hasLegacyChecksum()) { - // legacy checksum verification - no footer that we need to omit in the checksum! - final Checksum checksum = new Adler32(); - final byte[] buffer = new byte[md.length() > 4096 ? 4096 : (int) md.length()]; - final long len = input.length(); - long read = 0; - while (len > read) { - final long bytesLeft = len - read; - final int bytesToRead = bytesLeft < buffer.length ? (int) bytesLeft : buffer.length; - input.readBytes(buffer, 0, bytesToRead, false); - checksum.update(buffer, 0, bytesToRead); - read += bytesToRead; - } - String adler32 = Store.digestToString(checksum.getValue()); - if (!adler32.equals(md.checksum())) { - throw new CorruptIndexException("checksum failed (hardware problem?) 
: expected=" + md.checksum() + - " actual=" + adler32, input); - } + // throw exception if the file is corrupt + String checksum = Store.digestToString(CodecUtil.checksumEntireFile(input)); + // throw exception if metadata is inconsistent + if (!checksum.equals(md.checksum())) { + throw new CorruptIndexException("inconsistent metadata: lucene checksum=" + checksum + + ", metadata checksum=" + md.checksum(), input); } } } @@ -805,7 +765,7 @@ final int size = in.readVInt(); Map<String, StoreFileMetaData> metadata = new HashMap<>(); for (int i = 0; i < size; i++) { - StoreFileMetaData meta = StoreFileMetaData.readStoreFileMetaData(in); + StoreFileMetaData meta = new StoreFileMetaData(in); metadata.put(meta.name(), meta); } Map<String, String> commitUserData = new HashMap<>(); @@ -842,14 +802,13 @@ static LoadedMetadata loadMetadata(IndexCommit commit, Directory directory, ESLogger logger) throws IOException { long numDocs; Map<String, StoreFileMetaData> builder = new HashMap<>(); - Map<String, String> checksumMap = readLegacyChecksums(directory).v1(); Map<String, String> commitUserDataBuilder = new HashMap<>(); try { final SegmentInfos segmentCommitInfos = Store.readSegmentsInfo(commit, directory); numDocs = Lucene.getNumDocs(segmentCommitInfos); commitUserDataBuilder.putAll(segmentCommitInfos.getUserData()); @SuppressWarnings("deprecation") - Version maxVersion = Version.LUCENE_4_0; // we don't know which version was used to write so we take the max version. + Version maxVersion = segmentCommitInfos.getMinSegmentLuceneVersion(); // seed with the oldest segment version; the loop below raises it to the newest version seen. for (SegmentCommitInfo info : segmentCommitInfos) { final Version version = info.info.getVersion(); if (version == null) { @@ -860,26 +819,21 @@ maxVersion = version; } for (String file : info.files()) { - String legacyChecksum = checksumMap.get(file); - if (version.onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION)) { + if (version.onOrAfter(StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION)) { checksumFromLuceneFile(directory, file, builder, logger, version, SEGMENT_INFO_EXTENSION.equals(IndexFileNames.getExtension(file))); } else { - builder.put(file, new StoreFileMetaData(file, directory.fileLength(file), legacyChecksum, version)); + throw new IllegalStateException("version must be onOrAfter: " + StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION + " but was: " + version); } } } + if (maxVersion == null) { + maxVersion = StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION; + } final String segmentsFile = segmentCommitInfos.getSegmentsFileName(); - String legacyChecksum = checksumMap.get(segmentsFile); - if (maxVersion.onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION)) { + if (maxVersion.onOrAfter(StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION)) { checksumFromLuceneFile(directory, segmentsFile, builder, logger, maxVersion, true); } else { - final BytesRefBuilder fileHash = new BytesRefBuilder(); - final long length; - try (final IndexInput in = directory.openInput(segmentsFile, IOContext.READONCE)) { - length = in.length(); - hashFile(fileHash, new InputStreamIndexInput(in, length), length); - } - builder.put(segmentsFile, new StoreFileMetaData(segmentsFile, length, legacyChecksum, maxVersion, fileHash.get())); + throw new IllegalStateException("version must be onOrAfter: " + StoreFileMetaData.FIRST_LUCENE_CHECKSUM_VERSION + " but was: " + maxVersion); } } catch
(CorruptIndexException | IndexNotFoundException | IndexFormatTooOldException | IndexFormatTooNewException ex) { // we either know the index is corrupted or it's just not there @@ -904,61 +858,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref return new LoadedMetadata(unmodifiableMap(builder), unmodifiableMap(commitUserDataBuilder), numDocs); } - /** - * Reads legacy checksum files found in the directory. - *
<p>
- * Files are expected to start with _checksums- prefix - * followed by long file version. Only file with the highest version is read, all other files are ignored. - * - * @param directory the directory to read checksums from - * @return a map of file checksums and the checksum file version - */ - @SuppressWarnings("deprecation") // Legacy checksum needs legacy methods - static Tuple, Long> readLegacyChecksums(Directory directory) throws IOException { - synchronized (directory) { - long lastFound = -1; - for (String name : directory.listAll()) { - if (!isChecksum(name)) { - continue; - } - long current = Long.parseLong(name.substring(CHECKSUMS_PREFIX.length())); - if (current > lastFound) { - lastFound = current; - } - } - if (lastFound > -1) { - try (IndexInput indexInput = directory.openInput(CHECKSUMS_PREFIX + lastFound, IOContext.READONCE)) { - indexInput.readInt(); // version - return new Tuple<>(indexInput.readStringStringMap(), lastFound); - } - } - return new Tuple<>(new HashMap<>(), -1L); - } - } - - /** - * Deletes all checksum files with version lower than newVersion. - * - * @param directory the directory to clean - * @param newVersion the latest checksum file version - */ - static void cleanLegacyChecksums(Directory directory, long newVersion) throws IOException { - synchronized (directory) { - for (String name : directory.listAll()) { - if (isChecksum(name)) { - long current = Long.parseLong(name.substring(CHECKSUMS_PREFIX.length())); - if (current < newVersion) { - try { - directory.deleteFile(name); - } catch (IOException ex) { - logger.debug("can't delete old checksum file [{}]", ex, name); - } - } - } - } - } - } - private static void checksumFromLuceneFile(Directory directory, String file, Map builder, ESLogger logger, Version version, boolean readFileAsHash) throws IOException { final String checksum; @@ -1227,64 +1126,13 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref } } - public final static class LegacyChecksums { - private final Map legacyChecksums = new HashMap<>(); - - public void add(StoreFileMetaData metaData) throws IOException { - - if (metaData.hasLegacyChecksum()) { - synchronized (this) { - // we don't add checksums if they were written by LUCENE_48... now we are using the build in mechanism. 
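Once this LegacyChecksums writer is gone (the rest of its body is deleted below), checksum state lives only in StoreFileMetaData, which, as a later hunk shows, enforces a non-null checksum and a Lucene 4.8+ writer at construction time. A small sketch of the new invariant; the file name, length, and digest string are illustrative:

import org.apache.lucene.util.Version;
import org.elasticsearch.index.store.StoreFileMetaData;

class MetaDataInvariantSketch {
    static StoreFileMetaData valid() {
        // Accepted: a checksum-writing Lucene version plus a CRC32 digest string.
        return new StoreFileMetaData("_0.cfs", 1024, "8e5zuw", Version.LATEST);
    }
    // Rejected under the new code: a null checksum fails Objects.requireNonNull,
    // and a writtenBy older than FIRST_LUCENE_CHECKSUM_VERSION (Lucene 4.8)
    // trips the constructor assertion when assertions are enabled.
}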
- legacyChecksums.put(metaData.name(), metaData.checksum()); - } - } - } - - public synchronized void write(Store store) throws IOException { - synchronized (store.directory) { - Tuple, Long> tuple = MetadataSnapshot.readLegacyChecksums(store.directory); - tuple.v1().putAll(legacyChecksums); - if (!tuple.v1().isEmpty()) { - writeChecksums(store.directory, tuple.v1(), tuple.v2()); - } - } - } - - @SuppressWarnings("deprecation") // Legacy checksum uses legacy methods - synchronized void writeChecksums(Directory directory, Map checksums, long lastVersion) throws IOException { - // Make sure if clock goes backwards we still move version forwards: - long nextVersion = Math.max(lastVersion+1, System.currentTimeMillis()); - final String checksumName = CHECKSUMS_PREFIX + nextVersion; - try (IndexOutput output = directory.createOutput(checksumName, IOContext.DEFAULT)) { - output.writeInt(0); // version - output.writeStringStringMap(checksums); - } - directory.sync(Collections.singleton(checksumName)); - MetadataSnapshot.cleanLegacyChecksums(directory, nextVersion); - } - - public void clear() { - this.legacyChecksums.clear(); - } - - public void remove(String name) { - legacyChecksums.remove(name); - } - } - - public static final String CHECKSUMS_PREFIX = "_checksums-"; - - public static boolean isChecksum(String name) { - // TODO can we drowp .cks - return name.startsWith(CHECKSUMS_PREFIX) || name.endsWith(".cks"); // bwcomapt - .cks used to be a previous checksum file - } /** * Returns true if the file is auto-generated by the store and shouldn't be deleted during cleanup. * This includes write lock and checksum files */ public static boolean isAutogenerated(String name) { - return IndexWriter.WRITE_LOCK_NAME.equals(name) || isChecksum(name); + return IndexWriter.WRITE_LOCK_NAME.equals(name); } /** diff --git a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java index 31ad068a581..cfd5dc8f066 100644 --- a/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java +++ b/core/src/main/java/org/elasticsearch/index/store/StoreFileMetaData.java @@ -25,35 +25,42 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import java.io.IOException; +import java.util.Objects; /** * */ -public class StoreFileMetaData implements Streamable { +public class StoreFileMetaData implements Writeable { - private String name; + public static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_4_8_0; + + private final String name; // the actual file size on "disk", if compressed, the compressed size - private long length; + private final long length; - private String checksum; + private final String checksum; - private Version writtenBy; + private final Version writtenBy; - private BytesRef hash; + private final BytesRef hash; - private StoreFileMetaData() { - } - - public StoreFileMetaData(String name, long length) { - this(name, length, null); + public StoreFileMetaData(StreamInput in) throws IOException { + name = in.readString(); + length = in.readVLong(); + checksum = in.readString(); + String versionString = in.readString(); + assert versionString != null; + writtenBy = Lucene.parseVersionLenient(versionString, FIRST_LUCENE_CHECKSUM_VERSION); + 
hash = in.readBytesRef(); } public StoreFileMetaData(String name, long length, String checksum) { - this(name, length, checksum, null, null); + this(name, length, checksum, FIRST_LUCENE_CHECKSUM_VERSION); } public StoreFileMetaData(String name, long length, String checksum, Version writtenBy) { @@ -61,6 +68,10 @@ public class StoreFileMetaData implements Streamable { } public StoreFileMetaData(String name, long length, String checksum, Version writtenBy, BytesRef hash) { + assert writtenBy != null && writtenBy.onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION) : "index versions less than " + + FIRST_LUCENE_CHECKSUM_VERSION + " are not supported but got: " + writtenBy; + Objects.requireNonNull(writtenBy, "writtenBy must not be null"); + Objects.requireNonNull(checksum, "checksum must not be null"); this.name = name; this.length = length; this.checksum = checksum; @@ -85,10 +96,8 @@ public class StoreFileMetaData implements Streamable { /** * Returns a string representation of the files checksum. Since Lucene 4.8 this is a CRC32 checksum written - * by lucene. Previously we use Adler32 on top of Lucene as the checksum algorithm, if {@link #hasLegacyChecksum()} returns - * true this is a Adler32 checksum. + * by lucene. */ - @Nullable public String checksum() { return this.checksum; } @@ -104,33 +113,22 @@ public class StoreFileMetaData implements Streamable { return length == other.length && checksum.equals(other.checksum) && hash.equals(other.hash); } - public static StoreFileMetaData readStoreFileMetaData(StreamInput in) throws IOException { - StoreFileMetaData md = new StoreFileMetaData(); - md.readFrom(in); - return md; - } - @Override public String toString() { return "name [" + name + "], length [" + length + "], checksum [" + checksum + "], writtenBy [" + writtenBy + "]" ; } @Override - public void readFrom(StreamInput in) throws IOException { - name = in.readString(); - length = in.readVLong(); - checksum = in.readOptionalString(); - String versionString = in.readOptionalString(); - writtenBy = Lucene.parseVersionLenient(versionString, null); - hash = in.readBytesRef(); + public StoreFileMetaData readFrom(StreamInput in) throws IOException { + return new StoreFileMetaData(in); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeVLong(length); - out.writeOptionalString(checksum); - out.writeOptionalString(writtenBy == null ? null : writtenBy.toString()); + out.writeString(checksum); + out.writeString(writtenBy.toString()); out.writeBytesRef(hash); } @@ -141,14 +139,6 @@ public class StoreFileMetaData implements Streamable { return writtenBy; } - /** - * Returns true iff the checksum is not null and if the file has NOT been written by - * a Lucene version greater or equal to Lucene 4.8 - */ - public boolean hasLegacyChecksum() { - return checksum != null && (writtenBy == null || writtenBy.onOrAfter(Version.LUCENE_4_8) == false); - } - /** * Returns a variable length hash of the file represented by this metadata object. This can be the file * itself if the file is small enough.
If the length of the hash is 0 no hash value is available diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 661c8371537..29b7df0a859 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -76,8 +76,6 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget private final AtomicBoolean finished = new AtomicBoolean(); private final ConcurrentMap openIndexOutputs = ConcurrentCollections.newConcurrentMap(); - private final Store.LegacyChecksums legacyChecksums = new Store.LegacyChecksums(); - private final CancellableThreads cancellableThreads = new CancellableThreads(); // last time this status was accessed @@ -145,10 +143,6 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget return state().getStage(); } - public Store.LegacyChecksums legacyChecksums() { - return legacyChecksums; - } - /** renames all temporary files to their true name, potentially overriding existing files */ public void renameAllTempFiles() throws IOException { ensureRefCount(); @@ -281,7 +275,6 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget logger.trace("cleaning temporary file [{}]", file); store.deleteQuiet(file); } - legacyChecksums.clear(); } @Override @@ -344,8 +337,6 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget // to recover from in case of a full cluster shutdown just when this code executes... renameAllTempFiles(); final Store store = store(); - // now write checksums - legacyChecksums().write(store); try { store.cleanupAndVerify("recovery CleanFilesRequestHandler", sourceMetaData); } catch (CorruptIndexException | IndexFormatTooNewException | IndexFormatTooOldException ex) { @@ -399,8 +390,6 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget // we are done indexOutput.close(); } - // write the checksum - legacyChecksums().add(fileMetaData); final String temporaryFileName = getTempNameForFile(name); assert Arrays.asList(store.directory().listAll()).contains(temporaryFileName); store.directory().sync(Collections.singleton(temporaryFileName)); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index c4e41132802..7169be7d1f1 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -172,7 +172,8 @@ public class Node implements Closeable { tmpSettings = TribeService.processSettings(tmpSettings); ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(tmpSettings)); - logger.info("version[{}], pid[{}], build[{}/{}]", version, JvmInfo.jvmInfo().pid(), Build.CURRENT.shortHash(), Build.CURRENT.date()); + final String displayVersion = version + (Build.CURRENT.isSnapshot() ? 
"-SNAPSHOT" : ""); + logger.info("version[{}], pid[{}], build[{}/{}]", displayVersion, JvmInfo.jvmInfo().pid(), Build.CURRENT.shortHash(), Build.CURRENT.date()); logger.info("initializing ..."); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java index 2b3f0518c37..fb8e9c63740 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java @@ -109,6 +109,9 @@ public class RestNodesStatsAction extends BaseRestHandler { if (nodesStatsRequest.indices().isSet(Flag.Indexing) && (request.hasParam("types"))) { nodesStatsRequest.indices().types(request.paramAsStringArray("types", null)); } + if (nodesStatsRequest.indices().isSet(Flag.Segments) && (request.hasParam("include_segment_file_sizes"))) { + nodesStatsRequest.indices().includeSegmentFileSizes(true); + } client.admin().cluster().nodesStats(nodesStatsRequest, new RestToXContentListener(channel)); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java index e75dfcc4dc7..fa4371846f6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java @@ -104,6 +104,10 @@ public class RestIndicesStatsAction extends BaseRestHandler { indicesStatsRequest.fieldDataFields(request.paramAsStringArray("fielddata_fields", request.paramAsStringArray("fields", Strings.EMPTY_ARRAY))); } + if (indicesStatsRequest.segments() && request.hasParam("include_segment_file_sizes")) { + indicesStatsRequest.includeSegmentFileSizes(true); + } + client.admin().indices().stats(indicesStatsRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(IndicesStatsResponse response, XContentBuilder builder) throws Exception { diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java index b02609323ce..db11dee6539 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java @@ -23,8 +23,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; @@ -43,9 +41,6 @@ import org.elasticsearch.rest.action.support.RestActionListener; import org.elasticsearch.rest.action.support.RestResponseListener; import org.elasticsearch.rest.action.support.RestTable; -import java.util.HashMap; -import java.util.Map; - import static org.elasticsearch.rest.RestRequest.Method.GET; public class 
RestNodeAttrsAction extends AbstractCatAction { @@ -73,17 +68,10 @@ public class RestNodeAttrsAction extends AbstractCatAction { public void processResponse(final ClusterStateResponse clusterStateResponse) { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); nodesInfoRequest.clear().jvm(false).os(false).process(true); - client.admin().cluster().nodesInfo(nodesInfoRequest, new RestActionListener(channel) { + client.admin().cluster().nodesInfo(nodesInfoRequest, new RestResponseListener(channel) { @Override - public void processResponse(final NodesInfoResponse nodesInfoResponse) { - NodesStatsRequest nodesStatsRequest = new NodesStatsRequest(); - nodesStatsRequest.clear().jvm(false).os(false).fs(false).indices(false).process(false); - client.admin().cluster().nodesStats(nodesStatsRequest, new RestResponseListener(channel) { - @Override - public RestResponse buildResponse(NodesStatsResponse nodesStatsResponse) throws Exception { - return RestTable.buildResponse(buildTable(request, clusterStateResponse, nodesInfoResponse, nodesStatsResponse), channel); - } - }); + public RestResponse buildResponse(NodesInfoResponse nodesInfoResponse) throws Exception { + return RestTable.buildResponse(buildTable(request, clusterStateResponse, nodesInfoResponse), channel); } }); } @@ -106,7 +94,7 @@ public class RestNodeAttrsAction extends AbstractCatAction { return table; } - private Table buildTable(RestRequest req, ClusterStateResponse state, NodesInfoResponse nodesInfo, NodesStatsResponse nodesStats) { + private Table buildTable(RestRequest req, ClusterStateResponse state, NodesInfoResponse nodesInfo) { boolean fullId = req.paramAsBoolean("full_id", false); DiscoveryNodes nodes = state.getState().nodes(); @@ -115,32 +103,22 @@ public class RestNodeAttrsAction extends AbstractCatAction { for (DiscoveryNode node : nodes) { NodeInfo info = nodesInfo.getNodesMap().get(node.id()); for(ObjectObjectCursor att : node.attributes()) { - buildRow(fullId, table, node, info, att.key, att.value); - } - if (info.getServiceAttributes() != null) { - for (Map.Entry entry : info.getServiceAttributes().entrySet()) { - buildRow(fullId, table, node, info, entry.getKey(), entry.getValue()); + table.startRow(); + table.addCell(node.name()); + table.addCell(fullId ? node.id() : Strings.substring(node.getId(), 0, 4)); + table.addCell(info == null ? null : info.getProcess().getId()); + table.addCell(node.getHostName()); + table.addCell(node.getHostAddress()); + if (node.address() instanceof InetSocketTransportAddress) { + table.addCell(((InetSocketTransportAddress) node.address()).address().getPort()); + } else { + table.addCell("-"); } + table.addCell(att.key); + table.addCell(att.value); + table.endRow(); } } - return table; } - - private final void buildRow(boolean fullId, Table table, DiscoveryNode node, NodeInfo info, String key, String value) { - table.startRow(); - table.addCell(node.name()); - table.addCell(fullId ? node.id() : Strings.substring(node.getId(), 0, 4)); - table.addCell(info == null ? 
null : info.getProcess().getId()); - table.addCell(node.getHostName()); - table.addCell(node.getHostAddress()); - if (node.address() instanceof InetSocketTransportAddress) { - table.addCell(((InetSocketTransportAddress) node.address()).address().getPort()); - } else { - table.addCell("-"); - } - table.addCell(key); - table.addCell(value); - table.endRow(); - } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index cbabaa45656..0605bc4dcab 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -33,8 +33,11 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.http.HttpInfo; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.cache.request.RequestCacheStats; import org.elasticsearch.index.engine.SegmentsStats; @@ -92,7 +95,7 @@ public class RestNodesAction extends AbstractCatAction { @Override public void processResponse(final ClusterStateResponse clusterStateResponse) { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); - nodesInfoRequest.clear().jvm(true).os(true).process(true); + nodesInfoRequest.clear().jvm(true).os(true).process(true).http(true); client.admin().cluster().nodesInfo(nodesInfoRequest, new RestActionListener(channel) { @Override public void processResponse(final NodesInfoResponse nodesInfoResponse) { @@ -249,9 +252,14 @@ public class RestNodesAction extends AbstractCatAction { } else { table.addCell("-"); } - final Map serviceAttributes = info == null ? null : info.getServiceAttributes(); - if (serviceAttributes != null) { - table.addCell(serviceAttributes.getOrDefault("http_address", "-")); + final HttpInfo httpInfo = info == null ? 
null : info.getHttp(); + if (httpInfo != null) { + TransportAddress transportAddress = httpInfo.getAddress().publishAddress(); + if (transportAddress instanceof InetSocketTransportAddress) { + table.addCell(NetworkAddress.formatAddress(((InetSocketTransportAddress)transportAddress).address())); + } else { + table.addCell(transportAddress.toString()); + } } else { table.addCell("-"); } diff --git a/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java b/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java index 44de20eafb8..9284b36c388 100644 --- a/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java +++ b/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java @@ -45,7 +45,7 @@ import java.io.IOException; /** * To be implemented by {@link SearchScript} which can provided an {@link Explanation} of the score * This is currently not used inside elasticsearch but it is used, see for example here: - * https://github.com/elasticsearch/elasticsearch/issues/8561 + * https://github.com/elastic/elasticsearch/issues/8561 */ public interface ExplainableSearchScript extends LeafSearchScript { @@ -58,4 +58,4 @@ public interface ExplainableSearchScript extends LeafSearchScript { */ Explanation explain(Explanation subQueryScore) throws IOException; -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index a97bc2e21d7..0cb83dbd2f9 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -40,18 +40,17 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.script.Script; -import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AggregatorBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.rescore.RescoreBuilder; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -779,7 +778,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { fetchSourceContext = FetchSourceContext.parse(parser, context); } else if (context.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { - fieldNames.add(parser.text()); + field(parser.text()); } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) { sort(parser.text()); } else if 
(context.parseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index 70d78e78f23..69e41910ff5 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -153,7 +153,7 @@ public class BulkProcessorIT extends ESIntegTestCase { assertMultiGetResponse(multiGetRequestBuilder.get(), numDocs); } - //https://github.com/elasticsearch/elasticsearch/issues/5038 + //https://github.com/elastic/elasticsearch/issues/5038 public void testBulkProcessorConcurrentRequestsNoNodeAvailableException() throws Exception { //we create a transport client with no nodes to make sure it throws NoNodeAvailableException Settings settings = Settings.builder() diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java new file mode 100644 index 00000000000..6cd877315cd --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -0,0 +1,256 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + + +package org.elasticsearch.action.bulk; + +import org.apache.lucene.util.Constants; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.test.transport.CapturingTransport; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.Before; + +import java.nio.charset.StandardCharsets; +import java.util.HashSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.LongSupplier; + +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.mockito.Mockito.mock; + +public class TransportBulkActionTookTests extends ESTestCase { + + private ThreadPool threadPool; + + @Before + public void setUp() throws Exception { + super.setUp(); + threadPool = mock(ThreadPool.class); + } + + private TransportBulkAction createAction(boolean controlled, AtomicLong expected) { + CapturingTransport capturingTransport = new CapturingTransport(); + ClusterService clusterService = new TestClusterService(threadPool); + TransportService transportService = new TransportService(capturingTransport, threadPool); + transportService.start(); + transportService.acceptIncomingRequests(); + IndexNameExpressionResolver resolver = new Resolver(Settings.EMPTY); + ActionFilters actionFilters = new ActionFilters(new HashSet<>()); + + TransportCreateIndexAction createIndexAction = new TransportCreateIndexAction( + Settings.EMPTY, + transportService, + clusterService, + threadPool, + null, + actionFilters, + resolver); + + if (controlled) { + + return new TestTransportBulkAction( + Settings.EMPTY, + threadPool, + transportService, + clusterService, + null, + createIndexAction, + actionFilters, + resolver, + null, + expected::get) { + @Override + public void executeBulk(BulkRequest bulkRequest, ActionListener listener) { + expected.set(1000000); + super.executeBulk(bulkRequest, listener); + } + + @Override + void executeBulk( + BulkRequest bulkRequest, + long startTimeNanos, + ActionListener listener, + AtomicArray responses) { + expected.set(1000000); + super.executeBulk(bulkRequest, startTimeNanos, listener, responses); + } + }; + } else { + return new TestTransportBulkAction( + Settings.EMPTY, + threadPool, + transportService, + clusterService, + null, + createIndexAction, + actionFilters, + resolver, + null, + System::nanoTime) { + @Override + public void executeBulk(BulkRequest bulkRequest, ActionListener listener) { + long 
elapsed = spinForAtLeastOneMillisecond(); + expected.set(elapsed); + super.executeBulk(bulkRequest, listener); + } + + @Override + void executeBulk( + BulkRequest bulkRequest, + long startTimeNanos, + ActionListener<BulkResponse> listener, + AtomicArray<BulkItemResponse> responses) { + long elapsed = spinForAtLeastOneMillisecond(); + expected.set(elapsed); + super.executeBulk(bulkRequest, startTimeNanos, listener, responses); + } + }; + } + } + + // test unit conversion with a controlled clock + public void testTookWithControlledClock() throws Exception { + runTestTook(true); + } + + // test took advances with System#nanoTime + public void testTookWithRealClock() throws Exception { + runTestTook(false); + } + + private void runTestTook(boolean controlled) throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json"); + // translate Windows line endings (\r\n) to standard ones (\n) + if (Constants.WINDOWS) { + bulkAction = Strings.replace(bulkAction, "\r\n", "\n"); + } + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); + AtomicLong expected = new AtomicLong(); + TransportBulkAction action = createAction(controlled, expected); + action.doExecute(bulkRequest, new ActionListener<BulkResponse>() { + @Override + public void onResponse(BulkResponse bulkItemResponses) { + if (controlled) { + assertThat( + bulkItemResponses.getTook().getMillis(), + equalTo(TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS))); + } else { + assertThat( + bulkItemResponses.getTook().getMillis(), + greaterThanOrEqualTo(TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS))); + } + } + + @Override + public void onFailure(Throwable e) { + + } + }); + } + + static class Resolver extends IndexNameExpressionResolver { + public Resolver(Settings settings) { + super(settings); + } + + @Override + public String[] concreteIndices(ClusterState state, IndicesRequest request) { + return request.indices(); + } + } + + static class TestTransportBulkAction extends TransportBulkAction { + + public TestTransportBulkAction( + Settings settings, + ThreadPool threadPool, + TransportService transportService, + ClusterService clusterService, + TransportShardBulkAction shardBulkAction, + TransportCreateIndexAction createIndexAction, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + AutoCreateIndex autoCreateIndex, + LongSupplier relativeTimeProvider) { + super( + settings, + threadPool, + transportService, + clusterService, + shardBulkAction, + createIndexAction, + actionFilters, + indexNameExpressionResolver, + autoCreateIndex, + relativeTimeProvider); + } + + @Override + boolean needToCheck() { + return randomBoolean(); + } + + @Override + boolean shouldAutoCreate(String index, ClusterState state) { + return randomBoolean(); + } + + } + + static class TestTransportCreateIndexAction extends TransportCreateIndexAction { + + public TestTransportCreateIndexAction( + Settings settings, + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + MetaDataCreateIndexService createIndexService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, transportService, clusterService, threadPool, createIndexService, actionFilters, indexNameExpressionResolver); + } + + @Override + protected void doExecute(Task task, CreateIndexRequest request, ActionListener<CreateIndexResponse> listener) { +
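// note: this stub answers the create-index request inline instead of dispatching to + // MetaDataCreateIndexService, so the took tests never block on real index creation +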
listener.onResponse(newResponse()); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java new file mode 100644 index 00000000000..0a6f94366f9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -0,0 +1,226 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.support.nodes; + +import org.elasticsearch.Version; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeActionTests; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.test.transport.CapturingTransport; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReferenceArray; +import java.util.function.Supplier; + +public class TransportNodesActionTests extends ESTestCase { + + private static ThreadPool THREAD_POOL; + private static ClusterName CLUSTER_NAME = new ClusterName("test-cluster"); + + private TestClusterService clusterService; + private CapturingTransport transport; + private TestTransportNodesAction action; + + public void testRequestIsSentToEachNode() throws Exception { + TestNodesRequest request = new TestNodesRequest(); + PlainActionFuture<TestNodesResponse> listener = new PlainActionFuture<>(); + action.new AsyncAction(null, request, listener).start(); + Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); + int numNodes = clusterService.state().getNodes().size(); + // check a request was sent to the right number of nodes + assertEquals(numNodes, capturedRequests.size()); + } + + public void testNodesSelectors() { + int numSelectors = randomIntBetween(1, 5); + Set<String> nodeSelectors = new HashSet<>(); + for (int i = 0; i < numSelectors; i++) { +
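// selectors are drawn with replacement, so the set can end up smaller than numSelectors +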
nodeSelectors.add(randomFrom(NodeSelector.values()).selector); + } + int numNodeIds = randomIntBetween(0, 3); + String[] nodeIds = clusterService.state().nodes().nodes().keys().toArray(String.class); + for (int i = 0; i < numNodeIds; i++) { + String nodeId = randomFrom(nodeIds); + nodeSelectors.add(nodeId); + } + String[] finalNodesIds = nodeSelectors.toArray(new String[nodeSelectors.size()]); + TestNodesRequest request = new TestNodesRequest(finalNodesIds); + action.new AsyncAction(null, request, new PlainActionFuture<>()).start(); + Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); + assertEquals(clusterService.state().nodes().resolveNodesIds(finalNodesIds).length, capturedRequests.size()); + } + + private enum NodeSelector { + LOCAL("_local"), ELECTED_MASTER("_master"), MASTER_ELIGIBLE("master:true"), DATA("data:true"), CUSTOM_ATTRIBUTE("attr:value"); + + private final String selector; + + NodeSelector(String selector) { + this.selector = selector; + } + } + + @BeforeClass + public static void startThreadPool() { + THREAD_POOL = new ThreadPool(TransportBroadcastByNodeActionTests.class.getSimpleName()); + } + + @AfterClass + public static void destroyThreadPool() { + ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS); + // since this is static, it must be set to null to be eligible for collection + THREAD_POOL = null; + } + + @Before + public void setUp() throws Exception { + super.setUp(); + transport = new CapturingTransport(); + clusterService = new TestClusterService(THREAD_POOL); + final TransportService transportService = new TransportService(transport, THREAD_POOL); + transportService.start(); + transportService.acceptIncomingRequests(); + int numNodes = randomIntBetween(3, 10); + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + List<DiscoveryNode> discoveryNodes = new ArrayList<>(); + for (int i = 0; i < numNodes; i++) { + Map<String, String> attributes = new HashMap<>(); + if (randomBoolean()) { + attributes.put("master", Boolean.toString(randomBoolean())); + attributes.put("data", Boolean.toString(randomBoolean())); + attributes.put("ingest", Boolean.toString(randomBoolean())); + } else { + attributes.put("client", "true"); + } + if (frequently()) { + attributes.put("custom", randomBoolean() ? "match" : randomAsciiOfLengthBetween(3, 5)); + } +
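// about half the nodes advertise explicit master/data/ingest attributes and the rest + // are plain client nodes, so node resolution is exercised against a mixed cluster +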
"match" : randomAsciiOfLengthBetween(3, 5)); + } + final DiscoveryNode node = newNode(i, attributes); + discoBuilder = discoBuilder.put(node); + discoveryNodes.add(node); + } + discoBuilder.localNodeId(randomFrom(discoveryNodes).id()); + discoBuilder.masterNodeId(randomFrom(discoveryNodes).id()); + ClusterState.Builder stateBuilder = ClusterState.builder(CLUSTER_NAME); + stateBuilder.nodes(discoBuilder); + ClusterState clusterState = stateBuilder.build(); + clusterService.setState(clusterState); + action = new TestTransportNodesAction( + Settings.EMPTY, + THREAD_POOL, + clusterService, + transportService, + new ActionFilters(Collections.emptySet()), + TestNodesRequest::new, + TestNodeRequest::new, + ThreadPool.Names.SAME + ); + } + + private static DiscoveryNode newNode(int nodeId, Map attributes) { + String node = "node_" + nodeId; + return new DiscoveryNode(node, node, DummyTransportAddress.INSTANCE, attributes, Version.CURRENT); + } + + private static class TestTransportNodesAction extends TransportNodesAction { + + TestTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService + transportService, ActionFilters actionFilters, Supplier request, + Supplier nodeRequest, String nodeExecutor) { + super(settings, "indices:admin/test", CLUSTER_NAME, threadPool, clusterService, transportService, actionFilters, + null, request, nodeRequest, nodeExecutor); + } + + @Override + protected TestNodesResponse newResponse(TestNodesRequest request, AtomicReferenceArray nodesResponses) { + final List nodeResponses = new ArrayList<>(); + for (int i = 0; i < nodesResponses.length(); i++) { + Object resp = nodesResponses.get(i); + if (resp instanceof TestNodeResponse) { + nodeResponses.add((TestNodeResponse) resp); + } + } + return new TestNodesResponse(nodeResponses); + } + + @Override + protected TestNodeRequest newNodeRequest(String nodeId, TestNodesRequest request) { + return new TestNodeRequest(); + } + + @Override + protected TestNodeResponse newNodeResponse() { + return new TestNodeResponse(); + } + + @Override + protected TestNodeResponse nodeOperation(TestNodeRequest request) { + return new TestNodeResponse(); + } + + @Override + protected boolean accumulateExceptions() { + return false; + } + } + + private static class TestNodesRequest extends BaseNodesRequest { + TestNodesRequest(String... 
nodesIds) { + super(nodesIds); + } + } + + private static class TestNodesResponse extends BaseNodesResponse { + + private final List nodeResponses; + + TestNodesResponse(List nodeResponses) { + this.nodeResponses = nodeResponses; + } + } + + private static class TestNodeRequest extends BaseNodeRequest { + } + + private static class TestNodeResponse extends BaseNodeResponse { + } +} diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index 35e33b378bf..cf4690e3616 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -82,7 +82,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { public void addToMappings(XContentBuilder mappingsBuilder) throws IOException { mappingsBuilder.startObject(name); - mappingsBuilder.field("type", "string"); + mappingsBuilder.field("type", "text"); String tv_settings; if (storedPositions && storedOffset && storedPayloads) { tv_settings = "with_positions_offsets_payloads"; diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java new file mode 100644 index 00000000000..45986eab00e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java @@ -0,0 +1,167 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; + +import static org.hamcrest.CoreMatchers.containsString; + +public class BootstrapCheckTests extends ESTestCase { + + public void testNonProductionMode() { + // nothing should happen since we are in non-production mode + BootstrapCheck.check(Settings.EMPTY); + } + + public void testFileDescriptorLimits() { + final boolean osX = randomBoolean(); // simulates OS X versus non-OS X + final int limit = osX ? 
10240 : 1 << 16; + final AtomicLong maxFileDescriptorCount = new AtomicLong(randomIntBetween(1, limit - 1)); + final BootstrapCheck.FileDescriptorCheck check; + if (osX) { + check = new BootstrapCheck.OsXFileDescriptorCheck() { + @Override + long getMaxFileDescriptorCount() { + return maxFileDescriptorCount.get(); + } + }; + } else { + check = new BootstrapCheck.FileDescriptorCheck() { + @Override + long getMaxFileDescriptorCount() { + return maxFileDescriptorCount.get(); + } + }; + } + + try { + BootstrapCheck.check(true, Collections.singletonList(check)); + fail("should have failed due to max file descriptors too low"); + } catch (final RuntimeException e) { + assertThat(e.getMessage(), containsString("max file descriptors")); + } + + maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); + + BootstrapCheck.check(true, Collections.singletonList(check)); + + // nothing should happen if current file descriptor count is + // not available + maxFileDescriptorCount.set(-1); + BootstrapCheck.check(true, Collections.singletonList(check)); + } + + public void testFileDescriptorLimitsThrowsOnInvalidLimit() { + final IllegalArgumentException e = + expectThrows( + IllegalArgumentException.class, + () -> new BootstrapCheck.FileDescriptorCheck(-randomIntBetween(0, Integer.MAX_VALUE))); + assertThat(e.getMessage(), containsString("limit must be positive but was")); + } + + public void testMlockallCheck() { + class MlockallCheckTestCase { + + private final boolean mlockallSet; + private final boolean isMemoryLocked; + private final boolean shouldFail; + + public MlockallCheckTestCase(final boolean mlockallSet, final boolean isMemoryLocked, final boolean shouldFail) { + this.mlockallSet = mlockallSet; + this.isMemoryLocked = isMemoryLocked; + this.shouldFail = shouldFail; + } + + } + + final List<MlockallCheckTestCase> testCases = new ArrayList<>(); + testCases.add(new MlockallCheckTestCase(true, true, false)); + testCases.add(new MlockallCheckTestCase(true, false, true)); + testCases.add(new MlockallCheckTestCase(false, true, false)); + testCases.add(new MlockallCheckTestCase(false, false, false)); + + for (final MlockallCheckTestCase testCase : testCases) { + final BootstrapCheck.MlockallCheck check = new BootstrapCheck.MlockallCheck(testCase.mlockallSet) { + @Override + boolean isMemoryLocked() { + return testCase.isMemoryLocked; + } + }; + + if (testCase.shouldFail) { + try { + BootstrapCheck.check(true, Collections.singletonList(check)); + fail("should have failed due to memory not being locked"); + } catch (final RuntimeException e) { + assertThat( + e.getMessage(), + containsString("memory locking requested for elasticsearch process but memory is not locked")); + } + } else { + // nothing should happen + BootstrapCheck.check(true, Collections.singletonList(check)); + } + } + } + + public void testMaxNumberOfThreadsCheck() { + final int limit = 1 << 15; + final AtomicLong maxNumberOfThreads = new AtomicLong(randomIntBetween(1, limit - 1)); + final BootstrapCheck.MaxNumberOfThreadsCheck check = new BootstrapCheck.MaxNumberOfThreadsCheck() { + @Override + long getMaxNumberOfThreads() { + return maxNumberOfThreads.get(); + } + }; + + try { + BootstrapCheck.check(true, Collections.singletonList(check)); + fail("should have failed due to max number of threads too low"); + } catch (final RuntimeException e) { + assertThat(e.getMessage(), containsString("max number of threads")); + } + + maxNumberOfThreads.set(randomIntBetween(limit + 1, Integer.MAX_VALUE)); + + BootstrapCheck.check(true, 
Collections.singletonList(check)); + + // nothing should happen if current max number of threads is + // not available + maxNumberOfThreads.set(-1); + BootstrapCheck.check(true, Collections.singletonList(check)); + } + + public void testEnforceLimits() { + final Set enforceSettings = BootstrapCheck.enforceSettings(); + final Setting setting = randomFrom(Arrays.asList(enforceSettings.toArray(new Setting[enforceSettings.size()]))); + final Settings settings = Settings.builder().put(setting.getKey(), randomAsciiOfLength(8)).build(); + assertTrue(BootstrapCheck.enforceLimits(settings)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java index 83eecc63886..c032d3ddee8 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapSettingsTests.java @@ -19,9 +19,7 @@ package org.elasticsearch.bootstrap; -import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.monitor.process.ProcessProbe; import org.elasticsearch.test.ESTestCase; public class BootstrapSettingsTests extends ESTestCase { @@ -33,22 +31,4 @@ public class BootstrapSettingsTests extends ESTestCase { assertTrue(BootstrapSettings.CTRLHANDLER_SETTING.get(Settings.EMPTY)); } - @AwaitsFix(bugUrl = "this feature is disabled for snapshot builds, for now - see #16835") - public void testEnforceMaxFileDescriptorLimits() { - // nothing should happen since we are in OOB mode - Bootstrap.enforceOrLogLimits(Settings.EMPTY); - - Settings build = Settings.builder().put(randomFrom(Bootstrap.ENFORCE_SETTINGS.toArray(new Setting[0])).getKey(), - "127.0.0.1").build(); - long maxFileDescriptorCount = ProcessProbe.getInstance().getMaxFileDescriptorCount(); - try { - Bootstrap.enforceOrLogLimits(build); - if (maxFileDescriptorCount != -1 && maxFileDescriptorCount < (1 << 16)) { - fail("must have enforced limits: " + maxFileDescriptorCount); - } - } catch (IllegalStateException ex) { - assertTrue(ex.getMessage(), ex.getMessage().startsWith("max file descriptors")); - } - } - } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index c39ba1fddc9..b13cee98565 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -20,7 +20,9 @@ package org.elasticsearch.bwcompat; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.search.Explanation; import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.SmallFloat; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; @@ -297,6 +299,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { importIndex(indexName); assertIndexSanity(indexName, version); assertBasicSearchWorks(indexName); + assertAllSearchWorks(indexName); assertBasicAggregationWorks(indexName); assertRealtimeGetWorks(indexName); assertNewReplicasWork(indexName); @@ -354,6 +357,39 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { assertEquals(numDocs, searchRsp.getHits().getTotalHits()); } + boolean findPayloadBoostInExplanation(Explanation expl) { + if 
(expl.getDescription().startsWith("payloadBoost=") && expl.getValue() != 1f) { + return true; + } else { + boolean found = false; + for (Explanation sub : expl.getDetails()) { + found |= findPayloadBoostInExplanation(sub); + } + return found; + } + } + + void assertAllSearchWorks(String indexName) { + logger.info("--> testing _all search"); + SearchResponse searchRsp = client().prepareSearch(indexName).get(); + ElasticsearchAssertions.assertNoFailures(searchRsp); + assertThat(searchRsp.getHits().getTotalHits(), greaterThanOrEqualTo(1L)); + SearchHit bestHit = searchRsp.getHits().getAt(0); + + // Make sure there are payloads and they are taken into account for the score + // the 'string' field has a boost of 4 in the mappings so it should get a payload boost + String stringValue = (String) bestHit.sourceAsMap().get("string"); + assertNotNull(stringValue); + Explanation explanation = client().prepareExplain(indexName, bestHit.getType(), bestHit.getId()) + .setQuery(QueryBuilders.matchQuery("_all", stringValue)).get().getExplanation(); + assertTrue("Could not find payload boost in explanation\n" + explanation, findPayloadBoostInExplanation(explanation)); + + // Make sure the query can run on the whole index + searchRsp = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("_all", stringValue)).setExplain(true).get(); + ElasticsearchAssertions.assertNoFailures(searchRsp); + assertThat(searchRsp.getHits().getTotalHits(), greaterThanOrEqualTo(1L)); + } + void assertBasicAggregationWorks(String indexName) { // histogram on a long SearchResponse searchRsp = client().prepareSearch(indexName).addAggregation(AggregationBuilders.histogram("histo").field("long_sort").interval(10)).get(); diff --git a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 934a4d0cb84..207232883c7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -129,7 +129,7 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { logger.info("--> start data node / non master node"); internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)); - assertAcked(prepareCreate("test").addMapping("type1", "{\"type1\" : {\"properties\" : {\"table_a\" : { \"type\" : \"nested\", \"properties\" : {\"field_a\" : { \"type\" : \"string\" },\"field_b\" :{ \"type\" : \"string\" }}}}}}")); + assertAcked(prepareCreate("test").addMapping("type1", "{\"type1\" : {\"properties\" : {\"table_a\" : { \"type\" : \"nested\", \"properties\" : {\"field_a\" : { \"type\" : \"keyword\" },\"field_b\" :{ \"type\" : \"keyword\" }}}}}}")); client().admin().indices().prepareAliases().addAlias("test", "a_test", QueryBuilders.nestedQuery("table_a", QueryBuilders.termQuery("table_a.field_b", "y"))).get(); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java new file mode 100644 index 00000000000..49d73ff6be8 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -0,0 +1,176 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.node; + +import org.elasticsearch.Version; +import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; + +public class DiscoveryNodesTests extends ESTestCase { + + public void testResolveNodeByIdOrName() { + DiscoveryNodes discoveryNodes = buildDiscoveryNodes(); + DiscoveryNode[] nodes = discoveryNodes.nodes().values().toArray(DiscoveryNode.class); + DiscoveryNode node = randomFrom(nodes); + DiscoveryNode resolvedNode = discoveryNodes.resolveNode(randomBoolean() ? node.id() : node.name()); + assertThat(resolvedNode.id(), equalTo(node.id())); + } + + public void testResolveNodeByAttribute() { + DiscoveryNodes discoveryNodes = buildDiscoveryNodes(); + NodeSelector nodeSelector = randomFrom(NodeSelector.values()); + Set matchingNodeIds = nodeSelector.matchingNodeIds(discoveryNodes); + try { + DiscoveryNode resolvedNode = discoveryNodes.resolveNode(nodeSelector.selector); + assertThat(matchingNodeIds.size(), equalTo(1)); + assertThat(resolvedNode.id(), equalTo(matchingNodeIds.iterator().next())); + } catch(IllegalArgumentException e) { + if (matchingNodeIds.size() == 0) { + assertThat(e.getMessage(), equalTo("failed to resolve [" + nodeSelector.selector + "], no matching nodes")); + } else if (matchingNodeIds.size() > 1) { + assertThat(e.getMessage(), containsString("where expected to be resolved to a single node")); + } else { + fail("resolveNode shouldn't have failed for [" + nodeSelector.selector + "]"); + } + } + } + + public void testResolveNodesIds() { + DiscoveryNodes discoveryNodes = buildDiscoveryNodes(); + + int numSelectors = randomIntBetween(1, 5); + Set nodeSelectors = new HashSet<>(); + Set expectedNodeIdsSet = new HashSet<>(); + for (int i = 0; i < numSelectors; i++) { + NodeSelector nodeSelector = randomFrom(NodeSelector.values()); + if (nodeSelectors.add(nodeSelector.selector)) { + expectedNodeIdsSet.addAll(nodeSelector.matchingNodeIds(discoveryNodes)); + } + } + int numNodeIds = randomIntBetween(0, 3); + String[] nodeIds = discoveryNodes.nodes().keys().toArray(String.class); + for (int i = 0; i < numNodeIds; i++) { + String nodeId = randomFrom(nodeIds); + nodeSelectors.add(nodeId); + expectedNodeIdsSet.add(nodeId); + } + int numNodeNames = randomIntBetween(0, 3); + DiscoveryNode[] nodes = discoveryNodes.nodes().values().toArray(DiscoveryNode.class); + for (int i = 0; i < numNodeNames; i++) { + DiscoveryNode discoveryNode = randomFrom(nodes); + nodeSelectors.add(discoveryNode.name()); + expectedNodeIdsSet.add(discoveryNode.id()); + } + + String[] resolvedNodesIds = 
discoveryNodes.resolveNodesIds(nodeSelectors.toArray(new String[nodeSelectors.size()])); + Arrays.sort(resolvedNodesIds); + String[] expectedNodesIds = expectedNodeIdsSet.toArray(new String[expectedNodeIdsSet.size()]); + Arrays.sort(expectedNodesIds); + assertThat(resolvedNodesIds, equalTo(expectedNodesIds)); + } + + private static DiscoveryNodes buildDiscoveryNodes() { + int numNodes = randomIntBetween(1, 10); + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + List nodesList = new ArrayList<>(); + for (int i = 0; i < numNodes; i++) { + Map attributes = new HashMap<>(); + if (randomBoolean()) { + attributes.put("master", Boolean.toString(randomBoolean())); + attributes.put("data", Boolean.toString(randomBoolean())); + attributes.put("ingest", Boolean.toString(randomBoolean())); + } else { + attributes.put("client", "true"); + } + if (frequently()) { + attributes.put("custom", randomBoolean() ? "match" : randomAsciiOfLengthBetween(3, 5)); + } + final DiscoveryNode node = newNode(i, attributes); + discoBuilder = discoBuilder.put(node); + nodesList.add(node); + } + discoBuilder.localNodeId(randomFrom(nodesList).id()); + discoBuilder.masterNodeId(randomFrom(nodesList).id()); + return discoBuilder.build(); + } + + private static DiscoveryNode newNode(int nodeId, Map attributes) { + return new DiscoveryNode("name_" + nodeId, "node_" + nodeId, DummyTransportAddress.INSTANCE, attributes, Version.CURRENT); + } + + private enum NodeSelector { + LOCAL("_local") { + @Override + Set matchingNodeIds(DiscoveryNodes nodes) { + return Collections.singleton(nodes.localNodeId()); + } + }, ELECTED_MASTER("_master") { + @Override + Set matchingNodeIds(DiscoveryNodes nodes) { + return Collections.singleton(nodes.masterNodeId()); + } + }, MASTER_ELIGIBLE("master:true") { + @Override + Set matchingNodeIds(DiscoveryNodes nodes) { + Set ids = new HashSet<>(); + nodes.getMasterNodes().keysIt().forEachRemaining(ids::add); + return ids; + } + }, DATA("data:true") { + @Override + Set matchingNodeIds(DiscoveryNodes nodes) { + Set ids = new HashSet<>(); + nodes.getDataNodes().keysIt().forEachRemaining(ids::add); + return ids; + } + }, CUSTOM_ATTRIBUTE("attr:value") { + @Override + Set matchingNodeIds(DiscoveryNodes nodes) { + Set ids = new HashSet<>(); + nodes.getNodes().valuesIt().forEachRemaining(node -> { + if ("value".equals(node.getAttributes().get("attr"))) { + ids.add(node.id()); + } + }); + return ids; + } + }; + + private final String selector; + + NodeSelector(String selector) { + this.selector = selector; + } + + abstract Set matchingNodeIds(DiscoveryNodes nodes); + } +} diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java index ca33288e874..7bd47ac70f8 100644 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java @@ -48,7 +48,7 @@ public class CodecTests extends ESSingleNodeTestCase { public void testAcceptPostingsFormat() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("postings_format", Codec.getDefault().postingsFormat().getName()).endObject().endObject() + .startObject("properties").startObject("field").field("type", "keyword").field("postings_format", Codec.getDefault().postingsFormat().getName()).endObject().endObject() .endObject().endObject().string(); int i = 0; for (Version v : 
VersionUtils.allVersions()) { @@ -75,7 +75,7 @@ public class CodecTests extends ESSingleNodeTestCase { public void testAcceptDocValuesFormat() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("doc_values_format", Codec.getDefault().docValuesFormat().getName()).endObject().endObject() + .startObject("properties").startObject("field").field("type", "keyword").field("doc_values_format", Codec.getDefault().docValuesFormat().getName()).endObject().endObject() .endObject().endObject().string(); int i = 0; for (Version v : VersionUtils.allVersions()) { diff --git a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java index 7ee238ae7f2..9e7f54b8323 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java @@ -42,6 +42,7 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.SmallFloat; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.test.ESTestCase; @@ -89,8 +90,8 @@ public class SimpleAllTests extends ESTestCase { if (payload == null || payload.length == 0) { assertEquals(boost, 1f, 0.001f); } else { - assertEquals(4, payload.length); - final float b = PayloadHelper.decodeFloat(payload.bytes, payload.offset); + assertEquals(1, payload.length); + final float b = SmallFloat.byte315ToFloat(payload.bytes[payload.offset]); assertEquals(boost, b, 0.001f); } } diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java index 199ffaf8c03..17fbc882ba9 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.equalTo; public class ScriptScoreFunctionTests extends ESTestCase { /** - * Tests https://github.com/elasticsearch/elasticsearch/issues/2426 + * Tests https://github.com/elastic/elasticsearch/issues/2426 */ public void testScriptScoresReturnsNaN() throws IOException { ScoreFunction scoreFunction = new ScriptScoreFunction(new Script("Float.NaN"), new FloatValueScript(Float.NaN)); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 9d1176dd0f4..98bb385c122 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -367,7 +367,7 @@ public class SettingTests extends ESTestCase { } public void testDynamicKeySetting() { - Setting setting = Setting.dynamicKeySetting("foo.", "false", Boolean::parseBoolean, SettingsProperty.ClusterScope); + Setting setting = Setting.prefixKeySetting("foo.", "false", Boolean::parseBoolean, SettingsProperty.ClusterScope); assertTrue(setting.hasComplexMatcher()); assertTrue(setting.match("foo.bar")); assertFalse(setting.match("foo")); @@ -379,7 +379,28 @@ public class SettingTests extends ESTestCase { 
setting.getConcreteSetting("foo"); fail(); } catch (IllegalArgumentException ex) { - assertEquals("key must match setting but didn't [foo]", ex.getMessage()); + assertEquals("key [foo] must match [foo.] but didn't.", ex.getMessage()); + } + } + + public void testAdfixKeySetting() { + Setting<Boolean> setting = Setting.adfixKeySetting("foo", "enable", "false", Boolean::parseBoolean, SettingsProperty.ClusterScope); + assertTrue(setting.hasComplexMatcher()); + assertTrue(setting.match("foo.bar.enable")); + assertTrue(setting.match("foo.baz.enable")); + assertTrue(setting.match("foo.bar.baz.enable")); + assertFalse(setting.match("foo.bar")); + assertFalse(setting.match("foo.bar.baz.enabled")); + assertFalse(setting.match("foo")); + Setting<Boolean> concreteSetting = setting.getConcreteSetting("foo.bar.enable"); + assertTrue(concreteSetting.get(Settings.builder().put("foo.bar.enable", "true").build())); + assertFalse(concreteSetting.get(Settings.builder().put("foo.baz.enable", "true").build())); + + try { + setting.getConcreteSetting("foo"); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("key [foo] must match [foo*enable.] but didn't.", ex.getMessage()); } } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java index b0bbf0f47f2..c17429dccfb 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java @@ -46,20 +46,13 @@ public class PrioritizedRunnableTests extends ESTestCase { // test age advances with System#nanoTime public void testGetAgeInMillisWithRealClock() throws InterruptedException { - long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS); PrioritizedRunnable runnable = new PrioritizedRunnable(Priority.NORMAL) { @Override public void run() { } }; - // force at least one millisecond to elapse, but ensure the - // clock has enough resolution to observe the passage of time - long start = System.nanoTime(); - long elapsed; - while ((elapsed = (System.nanoTime() - start)) < nanosecondsInMillisecond) { - // busy spin - } + long elapsed = spinForAtLeastOneMillisecond(); // creation happened before start, so age will be at least as // large as elapsed diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index d2aa7dc311d..b9d7107ed54 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -198,7 +198,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { /** - * Test that no split brain occurs under partial network partition. See https://github.com/elasticsearch/elasticsearch/issues/2488 + * Test that no split brain occurs under partial network partition. 
See https://github.com/elastic/elasticsearch/issues/2488 */ public void testFailWithMinimumMasterNodesConfigured() throws Exception { List nodes = startCluster(3); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index dfff924f1a5..8fd6e303220 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -285,7 +285,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { logger.info("--> one node is closed - start indexing data into the second one"); client.prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().field("field", "value3").endObject()).execute().actionGet(); // TODO: remove once refresh doesn't fail immediately if there a master block: - // https://github.com/elasticsearch/elasticsearch/issues/9997 + // https://github.com/elastic/elasticsearch/issues/9997 client.admin().cluster().prepareHealth("test").setWaitForYellowStatus().get(); client.admin().indices().prepareRefresh().execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 6b417ce25be..68a4df685be 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -751,7 +751,7 @@ public class GetActionIT extends ESIntegTestCase { .startObject("field1").field("type", "object").startObject("properties") .startObject("field2").field("type", "object").startObject("properties") .startObject("field3").field("type", "object").startObject("properties") - .startObject("field4").field("type", "string").field("store", true) + .startObject("field4").field("type", "text").field("store", true) .endObject().endObject() .endObject().endObject() .endObject().endObject() diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 61f18eecf87..e9971a15f8e 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.engine; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.Level; import org.apache.log4j.LogManager; @@ -126,6 +128,7 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; @@ -293,8 +296,8 @@ public class InternalEngineTests extends ESTestCase { Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { List segments = engine.segments(false); assertThat(segments.isEmpty(), equalTo(true)); - assertThat(engine.segmentsStats().getCount(), equalTo(0L)); - assertThat(engine.segmentsStats().getMemoryInBytes(), equalTo(0L)); + assertThat(engine.segmentsStats(false).getCount(), equalTo(0L)); + assertThat(engine.segmentsStats(false).getMemoryInBytes(), equalTo(0L)); // create a doc 
and refresh ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); @@ -306,7 +309,7 @@ public class InternalEngineTests extends ESTestCase { segments = engine.segments(false); assertThat(segments.size(), equalTo(1)); - SegmentsStats stats = engine.segmentsStats(); + SegmentsStats stats = engine.segmentsStats(false); assertThat(stats.getCount(), equalTo(1L)); assertThat(stats.getTermsMemoryInBytes(), greaterThan(0L)); assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0L)); @@ -324,7 +327,7 @@ public class InternalEngineTests extends ESTestCase { segments = engine.segments(false); assertThat(segments.size(), equalTo(1)); - assertThat(engine.segmentsStats().getCount(), equalTo(1L)); + assertThat(engine.segmentsStats(false).getCount(), equalTo(1L)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); @@ -337,12 +340,12 @@ public class InternalEngineTests extends ESTestCase { segments = engine.segments(false); assertThat(segments.size(), equalTo(2)); - assertThat(engine.segmentsStats().getCount(), equalTo(2L)); - assertThat(engine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes())); - assertThat(engine.segmentsStats().getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes())); - assertThat(engine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0L)); - assertThat(engine.segmentsStats().getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes())); - assertThat(engine.segmentsStats().getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes())); + assertThat(engine.segmentsStats(false).getCount(), equalTo(2L)); + assertThat(engine.segmentsStats(false).getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes())); + assertThat(engine.segmentsStats(false).getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes())); + assertThat(engine.segmentsStats(false).getTermVectorsMemoryInBytes(), equalTo(0L)); + assertThat(engine.segmentsStats(false).getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes())); + assertThat(engine.segmentsStats(false).getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes())); assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); @@ -363,7 +366,7 @@ public class InternalEngineTests extends ESTestCase { segments = engine.segments(false); assertThat(segments.size(), equalTo(2)); - assertThat(engine.segmentsStats().getCount(), equalTo(2L)); + assertThat(engine.segmentsStats(false).getCount(), equalTo(2L)); assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); @@ -384,7 +387,7 @@ public class InternalEngineTests extends ESTestCase { segments = engine.segments(false); assertThat(segments.size(), equalTo(3)); - assertThat(engine.segmentsStats().getCount(), equalTo(3L)); + assertThat(engine.segmentsStats(false).getCount(), equalTo(3L)); assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); @@ -487,6 +490,29 @@ 
public class InternalEngineTests extends ESTestCase { } } + public void testSegmentsStatsIncludingFileSizes() throws Exception { + try (Store store = createStore(); + Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { + assertThat(engine.segmentsStats(true).getFileSizes().size(), equalTo(0)); + + ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + engine.index(new Engine.Index(newUid("1"), doc)); + engine.refresh("test"); + + SegmentsStats stats = engine.segmentsStats(true); + assertThat(stats.getFileSizes().size(), greaterThan(0)); + assertThat((Iterable<Long>) () -> stats.getFileSizes().valuesIt(), everyItem(greaterThan(0L))); + + ObjectObjectCursor<String, Long> firstEntry = stats.getFileSizes().iterator().next(); + + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + engine.index(new Engine.Index(newUid("2"), doc2)); + engine.refresh("test"); + + // file sizes are tracked cumulatively, so indexing a second doc must grow the first entry + assertThat(engine.segmentsStats(true).getFileSizes().get(firstEntry.key), greaterThan(firstEntry.value)); + } + } + + public void testCommitStats() { Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 6b81512b796..b432b758ca1 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -276,8 +276,8 @@ public class ShadowEngineTests extends ESTestCase { primaryEngine = createInternalEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE); List<Segment> segments = primaryEngine.segments(false); assertThat(segments.isEmpty(), equalTo(true)); - assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0L)); - assertThat(primaryEngine.segmentsStats().getMemoryInBytes(), equalTo(0L)); + assertThat(primaryEngine.segmentsStats(false).getCount(), equalTo(0L)); + assertThat(primaryEngine.segmentsStats(false).getMemoryInBytes(), equalTo(0L)); // create a doc and refresh ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); @@ -289,7 +289,7 @@ public class ShadowEngineTests extends ESTestCase { segments = primaryEngine.segments(false); assertThat(segments.size(), equalTo(1)); - SegmentsStats stats = primaryEngine.segmentsStats(); + SegmentsStats stats = primaryEngine.segmentsStats(false); assertThat(stats.getCount(), equalTo(1L)); assertThat(stats.getTermsMemoryInBytes(), greaterThan(0L)); assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0L)); @@ -306,7 +306,7 @@ public class ShadowEngineTests extends ESTestCase { // Check that the replica sees nothing segments = replicaEngine.segments(false); assertThat(segments.size(), equalTo(0)); - stats = replicaEngine.segmentsStats(); + stats = replicaEngine.segmentsStats(false); assertThat(stats.getCount(), equalTo(0L)); assertThat(stats.getTermsMemoryInBytes(), equalTo(0L)); assertThat(stats.getStoredFieldsMemoryInBytes(), equalTo(0L)); @@ -323,7 +323,7 @@ public class ShadowEngineTests extends ESTestCase { // Check that the primary AND replica sees segments now segments = primaryEngine.segments(false); assertThat(segments.size(), equalTo(1)); - assertThat(primaryEngine.segmentsStats().getCount(), equalTo(1L)); + 
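// segmentsStats(boolean) now takes includeSegmentFileSizes; false keeps the cheap stats path +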
assertThat(primaryEngine.segmentsStats(false).getCount(), equalTo(1L)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); @@ -332,7 +332,7 @@ public class ShadowEngineTests extends ESTestCase { segments = replicaEngine.segments(false); assertThat(segments.size(), equalTo(1)); - assertThat(replicaEngine.segmentsStats().getCount(), equalTo(1L)); + assertThat(replicaEngine.segmentsStats(false).getCount(), equalTo(1L)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); assertThat(segments.get(0).getNumDocs(), equalTo(2)); @@ -346,12 +346,12 @@ public class ShadowEngineTests extends ESTestCase { segments = primaryEngine.segments(false); assertThat(segments.size(), equalTo(2)); - assertThat(primaryEngine.segmentsStats().getCount(), equalTo(2L)); - assertThat(primaryEngine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes())); - assertThat(primaryEngine.segmentsStats().getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes())); - assertThat(primaryEngine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0L)); - assertThat(primaryEngine.segmentsStats().getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes())); - assertThat(primaryEngine.segmentsStats().getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes())); + assertThat(primaryEngine.segmentsStats(false).getCount(), equalTo(2L)); + assertThat(primaryEngine.segmentsStats(false).getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes())); + assertThat(primaryEngine.segmentsStats(false).getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes())); + assertThat(primaryEngine.segmentsStats(false).getTermVectorsMemoryInBytes(), equalTo(0L)); + assertThat(primaryEngine.segmentsStats(false).getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes())); + assertThat(primaryEngine.segmentsStats(false).getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes())); assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); @@ -370,12 +370,12 @@ public class ShadowEngineTests extends ESTestCase { segments = replicaEngine.segments(false); assertThat(segments.size(), equalTo(2)); - assertThat(replicaEngine.segmentsStats().getCount(), equalTo(2L)); - assertThat(replicaEngine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes())); - assertThat(replicaEngine.segmentsStats().getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes())); - assertThat(replicaEngine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0L)); - assertThat(replicaEngine.segmentsStats().getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes())); - assertThat(replicaEngine.segmentsStats().getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes())); + assertThat(replicaEngine.segmentsStats(false).getCount(), equalTo(2L)); + assertThat(replicaEngine.segmentsStats(false).getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes())); + assertThat(replicaEngine.segmentsStats(false).getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes())); + assertThat(replicaEngine.segmentsStats(false).getTermVectorsMemoryInBytes(), 
equalTo(0L)); + assertThat(replicaEngine.segmentsStats(false).getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes())); + assertThat(replicaEngine.segmentsStats(false).getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes())); assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); @@ -393,7 +393,7 @@ public class ShadowEngineTests extends ESTestCase { segments = primaryEngine.segments(false); assertThat(segments.size(), equalTo(2)); - assertThat(primaryEngine.segmentsStats().getCount(), equalTo(2L)); + assertThat(primaryEngine.segmentsStats(false).getCount(), equalTo(2L)); assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); @@ -416,7 +416,7 @@ public class ShadowEngineTests extends ESTestCase { segments = primaryEngine.segments(false); assertThat(segments.size(), equalTo(3)); - assertThat(primaryEngine.segmentsStats().getCount(), equalTo(3L)); + assertThat(primaryEngine.segmentsStats(false).getCount(), equalTo(3L)); assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true)); assertThat(segments.get(0).isCommitted(), equalTo(true)); assertThat(segments.get(0).isSearch(), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 2371247be47..5c229545755 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -135,7 +135,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { @Before public void setup() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_0); // we need 2.x so that fielddata is allowed on string fields Settings settings = Settings.builder().put("index.fielddata.cache", "none") .put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); indexService = createIndex("test", settings); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index d84de2174a2..101e7368353 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -27,15 +27,12 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Accountable; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.fielddata.plain.PagedBytesAtomicFieldData; -import 
org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData; import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData; import org.elasticsearch.index.mapper.ContentPath; @@ -46,33 +43,38 @@ import org.elasticsearch.index.mapper.core.ByteFieldMapper; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.FloatFieldMapper; import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.ShortFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; -import java.util.Collections; -import java.util.IdentityHashMap; -import java.util.Set; +import java.util.Collection; import java.util.concurrent.atomic.AtomicInteger; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testGetForFieldDefaults() { final IndexService indexService = createIndex("test"); final IndexFieldDataService ifdService = indexService.fieldData(); final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - final MappedFieldType stringMapper = new StringFieldMapper.Builder("string").tokenized(false).build(ctx).fieldType(); + final MappedFieldType stringMapper = new KeywordFieldMapper.Builder("string").build(ctx).fieldType(); ifdService.clear(); IndexFieldData<?> fd = ifdService.getForField(stringMapper); assertTrue(fd instanceof SortedSetDVOrdinalsIndexFieldData); @@ -99,36 +101,6 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { assertTrue(fd instanceof SortedNumericDVIndexFieldData); } - public void testChangeFieldDataFormat() throws Exception { - final IndexService indexService = createIndex("test"); - final IndexFieldDataService ifdService = indexService.fieldData(); - final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - final MappedFieldType mapper1 = new StringFieldMapper.Builder("s").tokenized(false).docValues(true).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "paged_bytes").build()).build(ctx).fieldType(); - final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); - Document doc = new Document(); - doc.add(new StringField("s", "thisisastring", Store.NO)); - writer.addDocument(doc); - final IndexReader reader1 = DirectoryReader.open(writer, true); - IndexFieldData<?> ifd = ifdService.getForField(mapper1); - assertThat(ifd, instanceOf(PagedBytesIndexFieldData.class)); - Set<LeafReader> oldSegments = 
Collections.newSetFromMap(new IdentityHashMap()); - for (LeafReaderContext arc : reader1.leaves()) { - oldSegments.add(arc.reader()); - AtomicFieldData afd = ifd.load(arc); - assertThat(afd, instanceOf(PagedBytesAtomicFieldData.class)); - } - // write new segment - writer.addDocument(doc); - final IndexReader reader2 = DirectoryReader.open(writer, true); - final MappedFieldType mapper2 = new StringFieldMapper.Builder("s").tokenized(false).docValues(true).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "doc_values").build()).build(ctx).fieldType(); - ifd = ifdService.getForField(mapper2); - assertThat(ifd, instanceOf(SortedSetDVOrdinalsIndexFieldData.class)); - reader1.close(); - reader2.close(); - writer.close(); - writer.getDirectory().close(); - } - public void testFieldDataCacheListener() throws Exception { final IndexService indexService = createIndex("test"); final IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -137,7 +109,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { indicesService.getIndicesFieldDataCache(), indicesService.getCircuitBreakerService(), indexService.mapperService()); final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - final MappedFieldType mapper1 = new StringFieldMapper.Builder("s").tokenized(false).docValues(true).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "paged_bytes").build()).build(ctx).fieldType(); + final MappedFieldType mapper1 = new TextFieldMapper.Builder("s").build(ctx).fieldType(); final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("s", "thisisastring", Store.NO)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 0931120c177..97c030e9eec 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -242,7 +242,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { // original mapping not modified assertEquals(mapping, serialize(mapper)); // but we have an update - assertEquals("{\"type\":{\"properties\":{\"foo\":{\"type\":\"string\"}}}}", serialize(update)); + assertEquals("{\"type\":{\"properties\":{\"foo\":{\"type\":\"text\"}}}}", serialize(update)); } public void testIncremental() throws Exception { @@ -264,7 +264,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { // but we have an update assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") // foo is NOT in the update - .startObject("bar").field("type", "string").endObject() + .startObject("bar").field("type", "text").endObject() .endObject().endObject().string(), serialize(update)); } @@ -284,8 +284,8 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { assertEquals(mapping, serialize(mapper)); // but we have an update assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("bar").field("type", "string").endObject() - .startObject("foo").field("type", "string").endObject() + .startObject("bar").field("type", "text").endObject() + .startObject("foo").field("type", "text").endObject() .endObject().endObject().string(), serialize(update)); } @@ 
-305,7 +305,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { assertEquals(mapping, serialize(mapper)); // but we have an update assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject() + .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text").endObject().endObject().endObject().endObject().endObject() .endObject().endObject().endObject().string(), serialize(update)); } @@ -325,7 +325,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { assertEquals(mapping, serialize(mapper)); // but we have an update assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("foo").field("type", "string").endObject() + .startObject("foo").field("type", "text").endObject() .endObject().endObject().endObject().string(), serialize(update)); } @@ -345,7 +345,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { assertEquals(mapping, serialize(mapper)); // but we have an update assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject() + .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text").endObject().endObject().endObject().endObject().endObject() .endObject().endObject().endObject().string(), serialize(update)); } @@ -366,7 +366,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { assertEquals(mapping, serialize(mapper)); assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo").startObject("properties") - .startObject("bar").field("type", "string").endObject() + .startObject("bar").field("type", "text").endObject() .startObject("baz").field("type", "long").endObject() .endObject().endObject() .endObject().endObject().endObject().string(), serialize(update)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 0ec3d7d7a58..53d5e1744eb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -366,7 +366,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } } - // related to https://github.com/elasticsearch/elasticsearch/issues/5864 + // related to https://github.com/elastic/elasticsearch/issues/5864 public void testMistypedTypeInRoot() throws IOException { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mistyped_type_in_root.json"); try { @@ -378,7 +378,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } } - // issue https://github.com/elasticsearch/elasticsearch/issues/5864 + // issue https://github.com/elastic/elasticsearch/issues/5864 public void testMisplacedMappingAsRoot() throws IOException { String mapping = 
copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/misplaced_mapping_key_in_root.json"); try { @@ -390,7 +390,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } } - // issue https://github.com/elasticsearch/elasticsearch/issues/5864 + // issue https://github.com/elastic/elasticsearch/issues/5864 // test that RootObjectMapping still works public void testRootObjectMapperPropertiesDoNotCauseException() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); @@ -404,7 +404,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { parser.parse("test", new CompressedXContent(mapping)); } - // issue https://github.com/elasticsearch/elasticsearch/issues/5864 + // issue https://github.com/elastic/elasticsearch/issues/5864 public void testMetadataMappersStillWorking() throws MapperParsingException, IOException { String mapping = "{"; Map rootTypes = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java index facc1eb41ad..c0eca41b26f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java @@ -112,7 +112,7 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase { .startObject("test") .startObject("properties") .startObject("foo") - .field("type", "string") + .field("type", "text") .field("store", storeCountedFields) .field("analyzer", "simple") .startObject("fields") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index 140fa11c810..3056b63b4c0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; @@ -48,11 +47,10 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TestSearchContext; -import org.elasticsearch.test.VersionUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Before; @@ -106,11 +104,11 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { assertThat(fieldMapper, instanceOf(DateFieldMapper.class)); fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date1"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); + 
assertThat(fieldMapper, instanceOf(TextFieldMapper.class)); fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date2"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); + assertThat(fieldMapper, instanceOf(TextFieldMapper.class)); fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date3"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); + assertThat(fieldMapper, instanceOf(TextFieldMapper.class)); } public void testParseLocal() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java index 250b7a8d28a..319d087f7fc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java @@ -45,7 +45,7 @@ public class SimpleDynamicTemplatesTests extends ESSingleNodeTestCase { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject().startObject("person").startArray("dynamic_templates").startObject().startObject("test") .field("match_mapping_type", "string") - .startObject("mapping").field("index", "no").endObject() + .startObject("mapping").field("index", false).endObject() .endObject().endObject().endArray().endObject().endObject(); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("person").setSource(builder.string()).get(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index b165722bf2a..38b13baebba 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -140,7 +140,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test"); DocumentMapper builderDocMapper = new DocumentMapper.Builder(new RootObjectMapper.Builder("person").add( - new StringFieldMapper.Builder("name").store(true) + new TextFieldMapper.Builder("name").store(true) .addMultiField(new TextFieldMapper.Builder("indexed").index(true).tokenized(true)) .addMultiField(new TextFieldMapper.Builder("not_indexed").index(false).store(true)) ), indexService.mapperService()).build(indexService.mapperService()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java b/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java index be3617aaa5a..c4000553d6c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/null_value/NullValueTests.java @@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.equalTo; public class NullValueTests extends ESSingleNodeTestCase { public void testNullNullValue() throws Exception { IndexService indexService = createIndex("test", Settings.settingsBuilder().build()); - String[] typesToTest = {"integer", "long", "double", "float", "short", "date", "ip", "string", "boolean", "byte"}; + String[] typesToTest = {"integer", "long", "double", "float", "short", "date", "ip", "keyword", "boolean", "byte"}; for (String type : typesToTest) { String mapping = XContentFactory.jsonBuilder() diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index e0c55a85e09..bf21f2fd6d3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.string.SimpleStringMappingTests; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -111,10 +111,10 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { defaultMapper = index.mapperService().documentMapper("type"); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); - assertThat(mapper, instanceOf(StringFieldMapper.class)); + assertThat(mapper, instanceOf(TextFieldMapper.class)); mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); - assertThat(mapper, instanceOf(StringFieldMapper.class)); + assertThat(mapper, instanceOf(TextFieldMapper.class)); } public void testIgnoreMalformedOption() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index caeb321b8c6..86c67db219f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -47,7 +47,6 @@ import org.elasticsearch.index.mapper.core.StringFieldMapper.Builder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import org.junit.Before; import java.io.IOException; @@ -75,7 +74,9 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { @Before public void before() { - indexService = createIndex("test"); + indexService = createIndex("test", + // we need 2.x since string is deprecated in 5.0 + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()); parser = indexService.mapperService().documentMapperParser(); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/StringFieldMapperPositionIncrementGapTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/StringFieldMapperPositionIncrementGapTests.java index c0e957e522f..680f0732f1f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/StringFieldMapperPositionIncrementGapTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/StringFieldMapperPositionIncrementGapTests.java @@ -20,14 +20,20 @@ package org.elasticsearch.index.mapper.string; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; 
import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -38,6 +44,12 @@ import static org.hamcrest.Matchers.containsString; * expected in queries. */ public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + /** * The default position_increment_gap should be large enough that most * "sensible" queries phrase slops won't match across values. @@ -106,11 +118,12 @@ public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTest * strange but not worth breaking some thought. */ public void testDefaultDefaultsToAnalyzer() throws IOException { - XContentBuilder settings = XContentFactory.jsonBuilder().startObject().startObject("analysis").startObject("analyzer") - .startObject("gappy"); - settings.field("type", "custom"); - settings.field("tokenizer", "standard"); - settings.field("position_increment_gap", 2); + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) + .put("analysis.analyzer.gappy.type", "custom") + .put("analysis.analyzer.gappy.tokenizer", "standard") + .put("analysis.analyzer.gappy.position_increment_gap", "2") + .build(); setupAnalyzer(settings, "gappy"); testGap(client(), "test", "test", 2); } @@ -123,7 +136,10 @@ public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTest XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); mapping.field("type", "string"); mapping.field("position_increment_gap", positionIncrementGap); - client().admin().indices().prepareCreate("test").addMapping("test", mapping).get(); + client().admin().indices().prepareCreate("test") + .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()) + .addMapping("test", mapping) + .get(); } /** @@ -131,11 +147,14 @@ public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTest * named "string" that uses the specified analyzer and default * position_increment_gap. 
*/ - private void setupAnalyzer(XContentBuilder settings, String analyzer) throws IOException { + private void setupAnalyzer(Settings settings, String analyzer) throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); mapping.field("type", "string"); mapping.field("analyzer", analyzer); - client().admin().indices().prepareCreate("test").addMapping("test", mapping).setSettings(settings).get(); + client().admin().indices().prepareCreate("test") + .addMapping("test", mapping) + .setSettings(settings) + .get(); } private static void testGap(Client client, String indexName, String type, int positionIncrementGap) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index ccdb09eb507..d9740d9f32b 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -185,7 +185,7 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase legacyMeta = new HashMap<>(); - for (String file : store.directory().listAll()) { - if (file.equals("write.lock") || file.equals(IndexFileNames.OLD_SEGMENTS_GEN) || file.startsWith("extra")) { - continue; - } - BytesRef hash = new BytesRef(); - if (file.startsWith("segments")) { - hash = Store.MetadataSnapshot.hashFile(store.directory(), file); - } - StoreFileMetaData storeFileMetaData = new StoreFileMetaData(file, store.directory().fileLength(file), file + "checksum", null, hash); - legacyMeta.put(file, storeFileMetaData); - checksums.add(storeFileMetaData); - } - checksums.write(store); - - metadata = store.getMetadata(); - Map stringStoreFileMetaDataMap = metadata.asMap(); - assertThat(legacyMeta.size(), equalTo(stringStoreFileMetaDataMap.size())); - if (usesOldCodec) { - for (StoreFileMetaData meta : legacyMeta.values()) { - assertTrue(meta.toString(), stringStoreFileMetaDataMap.containsKey(meta.name())); - assertEquals(meta.name() + "checksum", meta.checksum()); - assertTrue(meta + " vs. " + stringStoreFileMetaDataMap.get(meta.name()), stringStoreFileMetaDataMap.get(meta.name()).isSame(meta)); - } - } else { - - // even if we have a legacy checksum - if we use a new codec we should reuse - for (StoreFileMetaData meta : legacyMeta.values()) { - assertTrue(meta.toString(), stringStoreFileMetaDataMap.containsKey(meta.name())); - assertFalse(meta + " vs. 
" + stringStoreFileMetaDataMap.get(meta.name()), stringStoreFileMetaDataMap.get(meta.name()).isSame(meta)); - StoreFileMetaData storeFileMetaData = metadata.get(meta.name()); - try (IndexInput input = store.openVerifyingInput(meta.name(), IOContext.DEFAULT, storeFileMetaData)) { - assertTrue(storeFileMetaData.toString(), input instanceof Store.VerifyingIndexInput); - input.seek(meta.length()); - Store.verify(input); - } - } - } - assertDeleteContent(store, directoryService); - IOUtils.close(store); - - } - public void testNewChecksums() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); @@ -489,7 +400,6 @@ public class StoreTests extends ESTestCase { try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) { String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input)); assertThat("File: " + meta.name() + " has a different checksum", meta.checksum(), equalTo(checksum)); - assertThat(meta.hasLegacyChecksum(), equalTo(false)); assertThat(meta.writtenBy(), equalTo(Version.LATEST)); if (meta.name().endsWith(".si") || meta.name().startsWith("segments_")) { assertThat(meta.hash().length, greaterThan(0)); @@ -503,97 +413,6 @@ public class StoreTests extends ESTestCase { IOUtils.close(store); } - public void testMixedChecksums() throws IOException { - final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); - // this time random codec.... - IndexWriter writer = new IndexWriter(store.directory(), newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec())); - int docs = 1 + random().nextInt(100); - - for (int i = 0; i < docs; i++) { - Document doc = new Document(); - doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); - doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()), random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); - doc.add(new SortedDocValuesField("dv", new BytesRef(TestUtil.randomRealisticUnicodeString(random())))); - writer.addDocument(doc); - } - if (random().nextBoolean()) { - for (int i = 0; i < docs; i++) { - if (random().nextBoolean()) { - Document doc = new Document(); - doc.add(new TextField("id", "" + i, random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); - doc.add(new TextField("body", TestUtil.randomRealisticUnicodeString(random()), random().nextBoolean() ? 
Field.Store.YES : Field.Store.NO)); - writer.updateDocument(new Term("id", "" + i), doc); - } - } - } - if (random().nextBoolean()) { - DirectoryReader.open(writer, random().nextBoolean()).close(); // flush - } - Store.MetadataSnapshot metadata; - // check before we committed - try { - store.getMetadata(); - fail("no index present - expected exception"); - } catch (IndexNotFoundException ex) { - // expected - } - assertThat(store.getMetadataOrEmpty(), is(Store.MetadataSnapshot.EMPTY)); // nothing committed - writer.commit(); - writer.close(); - Store.LegacyChecksums checksums = new Store.LegacyChecksums(); - metadata = store.getMetadata(); - assertThat(metadata.asMap().isEmpty(), is(false)); - for (StoreFileMetaData meta : metadata) { - try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) { - if (meta.checksum() == null) { - String checksum = null; - try { - CodecUtil.retrieveChecksum(input); - fail("expected a corrupt index - posting format has not checksums"); - } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { - try (ChecksumIndexInput checksumIndexInput = store.directory().openChecksumInput(meta.name(), IOContext.DEFAULT)) { - checksumIndexInput.seek(meta.length()); - checksum = Store.digestToString(checksumIndexInput.getChecksum()); - } - // fine - it's a postings format without checksums - checksums.add(new StoreFileMetaData(meta.name(), meta.length(), checksum, null)); - } - } else { - String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input)); - assertThat("File: " + meta.name() + " has a different checksum", meta.checksum(), equalTo(checksum)); - assertThat(meta.hasLegacyChecksum(), equalTo(false)); - assertThat(meta.writtenBy(), equalTo(Version.LATEST)); - } - } - } - assertConsistent(store, metadata); - checksums.write(store); - metadata = store.getMetadata(); - assertThat(metadata.asMap().isEmpty(), is(false)); - for (StoreFileMetaData meta : metadata) { - assertThat("file: " + meta.name() + " has a null checksum", meta.checksum(), not(nullValue())); - if (meta.hasLegacyChecksum()) { - try (ChecksumIndexInput checksumIndexInput = store.directory().openChecksumInput(meta.name(), IOContext.DEFAULT)) { - checksumIndexInput.seek(meta.length()); - assertThat(meta.checksum(), equalTo(Store.digestToString(checksumIndexInput.getChecksum()))); - } - } else { - try (IndexInput input = store.directory().openInput(meta.name(), IOContext.DEFAULT)) { - String checksum = Store.digestToString(CodecUtil.retrieveChecksum(input)); - assertThat("File: " + meta.name() + " has a different checksum", meta.checksum(), equalTo(checksum)); - assertThat(meta.hasLegacyChecksum(), equalTo(false)); - assertThat(meta.writtenBy(), equalTo(Version.LATEST)); - } - } - } - assertConsistent(store, metadata); - TestUtil.checkIndex(store.directory()); - assertDeleteContent(store, directoryService); - IOUtils.close(store); - } - public void testRenameFile() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random(), false); @@ -654,18 +473,7 @@ public class StoreTests extends ESTestCase { } final Adler32 adler32 = new Adler32(); - long legacyFileLength = 0; - try (IndexOutput output = dir.createOutput("legacy.bin", IOContext.DEFAULT)) { - int iters = scaledRandomIntBetween(10, 100); - for (int i = 0; i < iters; i++) { - BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); - 
output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); - adler32.update(bytesRef.bytes, bytesRef.offset, bytesRef.length); - legacyFileLength += bytesRef.length; - } - } final long luceneChecksum; - final long adler32LegacyChecksum = adler32.getValue(); try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) { assertEquals(luceneFileLength, indexInput.length()); luceneChecksum = CodecUtil.retrieveChecksum(indexInput); @@ -673,38 +481,22 @@ public class StoreTests extends ESTestCase { { // positive check StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength, Store.digestToString(adler32LegacyChecksum)); - assertTrue(legacy.hasLegacyChecksum()); - assertFalse(lucene.hasLegacyChecksum()); assertTrue(Store.checkIntegrityNoException(lucene, dir)); - assertTrue(Store.checkIntegrityNoException(legacy, dir)); } { // negative check - wrong checksum StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength, Store.digestToString(luceneChecksum + 1), Version.LUCENE_4_8_0); - StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength, Store.digestToString(adler32LegacyChecksum + 1)); - assertTrue(legacy.hasLegacyChecksum()); - assertFalse(lucene.hasLegacyChecksum()); assertFalse(Store.checkIntegrityNoException(lucene, dir)); - assertFalse(Store.checkIntegrityNoException(legacy, dir)); } { // negative check - wrong length StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength + 1, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength + 1, Store.digestToString(adler32LegacyChecksum)); - assertTrue(legacy.hasLegacyChecksum()); - assertFalse(lucene.hasLegacyChecksum()); assertFalse(Store.checkIntegrityNoException(lucene, dir)); - assertFalse(Store.checkIntegrityNoException(legacy, dir)); } { // negative check - wrong file StoreFileMetaData lucene = new StoreFileMetaData("legacy.bin", luceneFileLength, Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0); - StoreFileMetaData legacy = new StoreFileMetaData("lucene_checksum.bin", legacyFileLength, Store.digestToString(adler32LegacyChecksum)); - assertTrue(legacy.hasLegacyChecksum()); - assertFalse(lucene.hasLegacyChecksum()); assertFalse(Store.checkIntegrityNoException(lucene, dir)); - assertFalse(Store.checkIntegrityNoException(legacy, dir)); } dir.close(); @@ -827,7 +619,7 @@ public class StoreTests extends ESTestCase { public static void assertConsistent(Store store, Store.MetadataSnapshot metadata) throws IOException { for (String file : store.directory().listAll()) { - if (!IndexWriter.WRITE_LOCK_NAME.equals(file) && !IndexFileNames.OLD_SEGMENTS_GEN.equals(file) && !Store.isChecksum(file) && file.startsWith("extra") == false) { + if (!IndexWriter.WRITE_LOCK_NAME.equals(file) && !IndexFileNames.OLD_SEGMENTS_GEN.equals(file) && file.startsWith("extra") == false) { assertTrue(file + " is not in the map: " + metadata.asMap().size() + " vs. " + store.directory().listAll().length, metadata.asMap().containsKey(file)); } else { assertFalse(file + " is not in the map: " + metadata.asMap().size() + " vs. 
" + store.directory().listAll().length, metadata.asMap().containsKey(file)); @@ -835,21 +627,6 @@ public class StoreTests extends ESTestCase { } } - /** - * Legacy indices without lucene CRC32 did never write or calculate checksums for segments_N files - * but for other files - */ - public void testRecoveryDiffWithLegacyCommit() { - Map metaDataMap = new HashMap<>(); - metaDataMap.put("segments_1", new StoreFileMetaData("segments_1", 50, null, null, new BytesRef(new byte[]{1}))); - metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", null, new BytesRef())); - Store.MetadataSnapshot first = new Store.MetadataSnapshot(unmodifiableMap(new HashMap<>(metaDataMap)), emptyMap(), 0); - - Store.MetadataSnapshot second = new Store.MetadataSnapshot(unmodifiableMap(new HashMap<>(metaDataMap)), emptyMap(), 0); - Store.RecoveryDiff recoveryDiff = first.recoveryDiff(second); - assertEquals(recoveryDiff.toString(), recoveryDiff.different.size(), 2); - } - public void testRecoveryDiff() throws IOException, InterruptedException { int numDocs = 2 + random().nextInt(100); List docs = new ArrayList<>(); @@ -1043,21 +820,6 @@ public class StoreTests extends ESTestCase { Store.MetadataSnapshot secondMeta = store.getMetadata(); - Store.LegacyChecksums checksums = new Store.LegacyChecksums(); - Map legacyMeta = new HashMap<>(); - for (String file : store.directory().listAll()) { - if (file.equals("write.lock") || file.equals(IndexFileNames.OLD_SEGMENTS_GEN) || file.startsWith("extra")) { - continue; - } - BytesRef hash = new BytesRef(); - if (file.startsWith("segments")) { - hash = Store.MetadataSnapshot.hashFile(store.directory(), file); - } - StoreFileMetaData storeFileMetaData = new StoreFileMetaData(file, store.directory().fileLength(file), file + "checksum", null, hash); - legacyMeta.put(file, storeFileMetaData); - checksums.add(storeFileMetaData); - } - checksums.write(store); // write one checksum file here - we expect it to survive all the cleanups if (randomBoolean()) { store.cleanupAndVerify("test", firstMeta); @@ -1068,16 +830,13 @@ public class StoreTests extends ESTestCase { if (file.startsWith("extra")) { continue; } - assertTrue(firstMeta.contains(file) || Store.isChecksum(file) || file.equals("write.lock")); - if (Store.isChecksum(file)) { - numChecksums++; - } else if (secondMeta.contains(file) == false) { + assertTrue(firstMeta.contains(file) || file.equals("write.lock")); + if (secondMeta.contains(file) == false) { numNotFound++; } } assertTrue("at least one file must not be in here since we have two commits?", numNotFound > 0); - assertEquals("we wrote one checksum but it's gone now? - checksums are supposed to be kept", numChecksums, 1); } else { store.cleanupAndVerify("test", secondMeta); String[] strings = store.directory().listAll(); @@ -1087,16 +846,13 @@ public class StoreTests extends ESTestCase { if (file.startsWith("extra")) { continue; } - assertTrue(file, secondMeta.contains(file) || Store.isChecksum(file) || file.equals("write.lock")); - if (Store.isChecksum(file)) { - numChecksums++; - } else if (firstMeta.contains(file) == false) { + assertTrue(file, secondMeta.contains(file) || file.equals("write.lock")); + if (firstMeta.contains(file) == false) { numNotFound++; } } assertTrue("at least one file must not be in here since we have two commits?", numNotFound > 0); - assertEquals("we wrote one checksum but it's gone now? 
- checksums are supposed to be kept", numChecksums, 1); } deleteContent(store.directory()); @@ -1105,8 +861,8 @@ public class StoreTests extends ESTestCase { public void testCleanUpWithLegacyChecksums() throws IOException { Map metaDataMap = new HashMap<>(); - metaDataMap.put("segments_1", new StoreFileMetaData("segments_1", 50, null, null, new BytesRef(new byte[]{1}))); - metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", null, new BytesRef())); + metaDataMap.put("segments_1", new StoreFileMetaData("segments_1", 50, "foobar", Version.LUCENE_4_8_0, new BytesRef(new byte[]{1}))); + metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", Version.LUCENE_4_8_0, new BytesRef())); Store.MetadataSnapshot snapshot = new Store.MetadataSnapshot(unmodifiableMap(metaDataMap), emptyMap(), 0); final ShardId shardId = new ShardId("index", "_na_", 1); @@ -1232,8 +988,8 @@ public class StoreTests extends ESTestCase { } protected Store.MetadataSnapshot createMetaDataSnapshot() { - StoreFileMetaData storeFileMetaData1 = new StoreFileMetaData("segments", 1); - StoreFileMetaData storeFileMetaData2 = new StoreFileMetaData("no_segments", 1); + StoreFileMetaData storeFileMetaData1 = new StoreFileMetaData("segments", 1, "666"); + StoreFileMetaData storeFileMetaData2 = new StoreFileMetaData("no_segments", 1, "666"); Map storeFileMetaDataMap = new HashMap<>(); storeFileMetaDataMap.put(storeFileMetaData1.name(), storeFileMetaData1); storeFileMetaDataMap.put(storeFileMetaData2.name(), storeFileMetaData2); diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index 134a862569c..09371c38dab 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -117,7 +117,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { /** * Test case for #5030: Upgrading analysis plugins fails - * See https://github.com/elasticsearch/elasticsearch/issues/5030 + * See https://github.com/elastic/elasticsearch/issues/5030 */ public void testThatPluginAnalyzersCanBeUpdated() throws Exception { final XContentBuilder mapping = jsonBuilder().startObject() diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java index feb9863ec1e..2981f2d110c 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -44,8 +44,8 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase { final String fieldName = "field"; final String mapping = "{ \"" + mappingType + "\": {" + "\"dynamic_templates\": [" - + "{ \"" + fieldName + "\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"string\"," + "\"store\": true," - + "\"index\": \"analyzed\", \"analyzer\": \"whitespace\" } } } ] } }"; + + "{ \"" + fieldName + "\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"text\"," + "\"store\": true," + + "\"analyzer\": \"whitespace\" } } } ] } }"; // The 'fieldNames' array is used to help with retrieval of index terms // after testing diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java 
b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index a3169f08498..0951f3c46df 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -100,7 +100,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { settingsBuilder() .put("index.number_of_shards", 1) .put("index.number_of_replicas", 0) - ).addMapping("doc", "{\"doc\":{\"properties\":{\"body\":{\"type\":\"string\"}}}}") + ).addMapping("doc", "{\"doc\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}") .execute().actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -112,7 +112,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").execute().actionGet(); assertThat(getMappingsResponse.mappings().get("test").get("doc").source().toString(), - equalTo("{\"doc\":{\"properties\":{\"body\":{\"type\":\"string\"},\"date\":{\"type\":\"integer\"}}}}")); + equalTo("{\"doc\":{\"properties\":{\"body\":{\"type\":\"text\"},\"date\":{\"type\":\"integer\"}}}}")); } public void testUpdateMappingWithoutTypeMultiObjects() throws Exception { @@ -141,7 +141,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { settingsBuilder() .put("index.number_of_shards", 2) .put("index.number_of_replicas", 0) - ).addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"string\"}}}}") + ).addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}") .execute().actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -150,17 +150,17 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}").execute().actionGet(); fail("Expected MergeMappingException"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [body] of different type, current_type [string], merged_type [integer]")); + assertThat(e.getMessage(), containsString("mapper [body] of different type, current_type [text], merged_type [integer]")); } } public void testUpdateMappingWithNormsConflicts() throws Exception { client().admin().indices().prepareCreate("test") - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"string\", \"norms\": { \"enabled\": false }}}}}") + .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": { \"enabled\": false }}}}}") .execute().actionGet(); try { client().admin().indices().preparePutMapping("test").setType("type") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"string\", \"norms\": { \"enabled\": true }}}}}").execute() + .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": { \"enabled\": true }}}}}").execute() .actionGet(); fail("Expected MergeMappingException"); } catch (IllegalArgumentException e) { @@ -169,7 +169,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { } /* - Second regression test for https://github.com/elasticsearch/elasticsearch/issues/3381 + Second regression test for https://github.com/elastic/elasticsearch/issues/3381 */ public void testUpdateMappingNoChanges() throws Exception { 
client().admin().indices().prepareCreate("test") @@ -177,12 +177,12 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { settingsBuilder() .put("index.number_of_shards", 2) .put("index.number_of_replicas", 0) - ).addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"string\"}}}}") + ).addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}") .execute().actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("type") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"string\"}}}}") + .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}") .execute().actionGet(); //no changes, we return diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 6cdd4cf348a..ec808647787 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -148,7 +148,7 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Create an index where the mappings have a field data filter assertAcked(prepareCreate("ramtest").setSource("{\"mappings\": {\"type\": {\"properties\": {\"test\": " + - "{\"type\": \"string\",\"fielddata\": {\"filter\": {\"regex\": {\"pattern\": \"^value.*\"}}}}}}}}")); + "{\"type\": \"text\",\"fielddata\": {\"filter\": {\"regex\": {\"pattern\": \"^value.*\"}}}}}}}}")); ensureGreen("ramtest"); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java index a4ffef7b9eb..8ba0ac45c73 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.indices.recovery; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -25,6 +27,7 @@ import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -35,7 +38,7 @@ import java.util.regex.Pattern; /** */ public class RecoveryStatusTests extends ESSingleNodeTestCase { - + public void testRenameTempFiles() throws IOException { IndexService service = createIndex("foo"); @@ -50,14 +53,16 @@ public class RecoveryStatusTests extends ESSingleNodeTestCase { public void onRecoveryFailure(RecoveryState state, RecoveryFailedException e, boolean sendShardFailure) { } }); - try (IndexOutput indexOutput = status.openAndPutIndexOutput("foo.bar", new StoreFileMetaData("foo.bar", 8), status.store())) { + try (IndexOutput indexOutput = status.openAndPutIndexOutput("foo.bar", new StoreFileMetaData("foo.bar", 8 + CodecUtil.footerLength(), "9z51nw"), status.store())) { indexOutput.writeInt(1); IndexOutput 
openIndexOutput = status.getOpenIndexOutput("foo.bar"); assertSame(openIndexOutput, indexOutput); openIndexOutput.writeInt(1); + CodecUtil.writeFooter(indexOutput); } + try { - status.openAndPutIndexOutput("foo.bar", new StoreFileMetaData("foo.bar", 8), status.store()); + status.openAndPutIndexOutput("foo.bar", new StoreFileMetaData("foo.bar", 8 + CodecUtil.footerLength(), "9z51nw"), status.store()); fail("file foo.bar is already opened and registered"); } catch (IllegalStateException ex) { assertEquals("output for file [foo.bar] has already been created", ex.getMessage()); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 1d6c54f643b..e29ad3e081a 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -749,7 +749,7 @@ public class IndexStatsIT extends ESIntegTestCase { assertAcked(prepareCreate("test1") .addMapping( "bar", - "{ \"properties\": { \"bar\": { \"type\": \"string\", \"fields\": { \"completion\": { \"type\": \"completion\" }}},\"baz\": { \"type\": \"string\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}}}")); + "{ \"properties\": { \"bar\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}},\"baz\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}}}")); ensureGreen(); client().prepareIndex("test1", "bar", Integer.toString(1)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}").execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 8881624c206..cce687fcec3 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -345,7 +345,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { client().admin().indices().preparePutTemplate("template_with_aliases") .setTemplate("te*") - .addMapping("type1", "{\"type1\" : {\"properties\" : {\"value\" : {\"type\" : \"string\"}}}}") + .addMapping("type1", "{\"type1\" : {\"properties\" : {\"value\" : {\"type\" : \"text\"}}}}") .addAlias(new Alias("simple_alias")) .addAlias(new Alias("templated_alias-{index}")) .addAlias(new Alias("filtered_alias").filter("{\"type\":{\"value\":\"type2\"}}")) diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index b643ba0d0ad..e3777e84f9a 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -38,13 +38,8 @@ import static org.hamcrest.Matchers.notNullValue; /** * */ -@ClusterScope(scope= Scope.TEST, numDataNodes =0) +@ClusterScope(scope= Scope.TEST, numDataNodes = 0) public class SimpleNodesInfoIT extends ESIntegTestCase { - static final class Fields { - static final String SITE_PLUGIN = "dummy"; - static final String SITE_PLUGIN_DESCRIPTION = "This is a description for a dummy test site plugin."; - static final String SITE_PLUGIN_VERSION = "0.0.7-BOND-SITE"; - } public void testNodesInfos() throws Exception { List nodesIds = internalCluster().startNodesAsync(2).get(); diff --git 
a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index f9eba5fc7dd..01494aab72d 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -1683,10 +1683,10 @@ public class PercolatorIT extends ESIntegTestCase { String mapping = "{\n" + " \"doc\": {\n" + " \"properties\": {\n" + - " \"name\": {\"type\":\"string\"},\n" + + " \"name\": {\"type\":\"text\"},\n" + " \"persons\": {\n" + " \"type\": \"nested\"\n," + - " \"properties\" : {\"foo\" : {\"type\" : \"string\"}}" + + " \"properties\" : {\"foo\" : {\"type\" : \"text\"}}" + " }\n" + " }\n" + " }\n" + diff --git a/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java index a43b481b2e8..6a4de4b9ff2 100644 --- a/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -249,7 +249,7 @@ public class AliasRoutingIT extends ESIntegTestCase { } /* - See https://github.com/elasticsearch/elasticsearch/issues/2682 + See https://github.com/elastic/elasticsearch/issues/2682 Searching on more than one index, if one of those is an alias with configured routing, the shards that belonged to the other indices (without routing) were not taken into account in PlainOperationRouting#searchShards. That affected the number of shards that we executed the search on, thus some documents were missing in the search results. @@ -273,7 +273,7 @@ public class AliasRoutingIT extends ESIntegTestCase { } /* - See https://github.com/elasticsearch/elasticsearch/pull/3268 + See https://github.com/elastic/elasticsearch/pull/3268 Searching on more than one index, if one of those is an alias with configured routing, the shards that belonged to the other indices (without routing) were not taken into account in PlainOperationRouting#searchShardsCount. That could cause returning 1, which led to forcing the QUERY_AND_FETCH mode. 
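A note on the RecoveryStatusTests hunk above: with the legacy (pre-Lucene 4.8) Adler32 checksums removed elsewhere in this patch, every StoreFileMetaData now carries a real CRC32 checksum, and advertised file lengths must include the Lucene footer. That is why the test writes 8 bytes of payload, seals the file with CodecUtil.writeFooter, and declares a length of 8 + CodecUtil.footerLength(). A minimal stand-alone sketch of that pattern follows; the class, directory, and file names here are illustrative and not part of the patch:

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

public class FooterSketch {
    public static void main(String[] args) throws Exception {
        try (RAMDirectory dir = new RAMDirectory()) {
            // Write 8 bytes of payload and seal the file with a CRC32 footer,
            // mirroring the two writeInt(1) calls plus writeFooter in the test.
            try (IndexOutput out = dir.createOutput("foo.bar", IOContext.DEFAULT)) {
                out.writeInt(1);
                out.writeInt(1);
                CodecUtil.writeFooter(out); // appends footer magic, algorithm id, and checksum
            }
            // The on-disk length includes the footer, matching 8 + CodecUtil.footerLength().
            if (dir.fileLength("foo.bar") != 8 + CodecUtil.footerLength()) {
                throw new IllegalStateException("unexpected file length");
            }
            // On the read side, the footer is validated and the stored checksum read back.
            // Store.digestToString renders such checksums in base 36, which is where
            // strings like "9z51nw" in the hunk come from.
            try (IndexInput in = dir.openInput("foo.bar", IOContext.DEFAULT)) {
                long crc = CodecUtil.retrieveChecksum(in);
                System.out.println(Long.toString(crc, Character.MAX_RADIX));
            }
        }
    }
}
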
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java index 1c53c354b0d..6635a674b6a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -61,12 +61,12 @@ public class DiversifiedSamplerIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0).addMapping( - "book", "author", "type=string,index=not_analyzed", "name", "type=string,index=analyzed", "genre", - "type=string,index=not_analyzed", "price", "type=float")); + "book", "author", "type=keyword", "name", "type=keyword", "genre", + "type=keyword", "price", "type=float")); createIndex("idx_unmapped"); // idx_unmapped_author is same as main index but missing author field assertAcked(prepareCreate("idx_unmapped_author").setSettings(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS, SETTING_NUMBER_OF_REPLICAS, 0) - .addMapping("book", "name", "type=string,index=analyzed", "genre", "type=string,index=not_analyzed", "price", + .addMapping("book", "name", "type=keyword", "genre", "type=keyword", "price", "type=float")); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 9a4b5ecc4c9..fe88689dc5b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -106,7 +106,7 @@ public class FilterIT extends ESIntegTestCase { } // See NullPointer issue when filters are empty: - // https://github.com/elasticsearch/elasticsearch/issues/8438 + // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index bf252010705..21b860ad8cb 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -132,7 +132,7 @@ public class FiltersIT extends ESIntegTestCase { } // See NullPointer issue when filters are empty: - // https://github.com/elasticsearch/elasticsearch/issues/8438 + // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); SearchResponse response = client().prepareSearch("idx") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 096376f823e..044ca4f8045 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -360,7 +360,7 @@ public class NestedIT extends ESIntegTestCase { } } - // Test based on: 
https://github.com/elasticsearch/elasticsearch/issues/9280 + // Test based on: https://github.com/elastic/elasticsearch/issues/9280 public void testParentFilterResolvedCorrectly() throws Exception { XContentBuilder mapping = jsonBuilder().startObject().startObject("provider").startObject("properties") .startObject("comments") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index c0cb89c4ca0..86d429a444b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -94,7 +94,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } public void testPlugin() throws Exception { - String type = randomBoolean() ? "string" : "long"; + String type = randomBoolean() ? "text" : "long"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) @@ -257,7 +257,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } public void testXContentResponse() throws Exception { - String type = randomBoolean() ? "string" : "long"; + String type = randomBoolean() ? "text" : "long"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) @@ -309,7 +309,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { indexRandom(true, false, indexRequestBuilderList); // Now create some holes in the index with selective deletes caused by updates. - // This is the scenario that caused this issue https://github.com/elasticsearch/elasticsearch/issues/7951 + // This is the scenario that caused this issue https://github.com/elastic/elasticsearch/issues/7951 // Scoring algorithms throw exceptions if term docFreqs exceed the reported size of the index // from which they are taken so need to make sure this doesn't happen. String[] text = cat1v1; @@ -333,7 +333,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } public void testBackgroundVsSeparateSet() throws Exception { - String type = randomBoolean() ? "string" : "long"; + String type = randomBoolean() ? "text" : "long"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); testBackgroundVsSeparateSet(new MutualInformation(true, true), new MutualInformation(true, false)); @@ -460,7 +460,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } public void testScriptScore() throws ExecutionException, InterruptedException, IOException { - indexRandomFrequencies01(randomBoolean() ? "string" : "long"); + indexRandomFrequencies01(randomBoolean() ? 
"text" : "long"); ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic(); ensureYellow(); SearchResponse response = client().prepareSearch(INDEX_NAME) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 4d61f459383..870569c2002 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -49,9 +49,9 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase { return randomBoolean() ? null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString(); } - // see https://github.com/elasticsearch/elasticsearch/issues/5998 + // see https://github.com/elastic/elasticsearch/issues/5998 public void testShardMinDocCountSignificantTermsTest() throws Exception { - String termtype = "string"; + String termtype = "text"; if (randomBoolean()) { termtype = "long"; } @@ -107,9 +107,9 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase { } } - // see https://github.com/elasticsearch/elasticsearch/issues/5998 + // see https://github.com/elastic/elasticsearch/issues/5998 public void testShardMinDocCountTermsTest() throws Exception { - final String [] termTypes = {"string", "long", "integer", "float", "double"}; + final String [] termTypes = {"text", "long", "integer", "float", "double"}; String termtype = termTypes[randomInt(termTypes.length - 1)]; assertAcked(prepareCreate(index).setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).addMapping(type, "{\"properties\":{\"text\": {\"type\": \"" + termtype + "\"}}}")); diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index d74a148a2f9..9440a3e91c1 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -758,11 +758,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0L)); - response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value"))).get(); + response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("parent", matchQuery("text", "value"))).get(); assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0L)); - response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value")).score(true)) + response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("parent", matchQuery("text", "value")).score(true)) .get(); assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0L)); @@ -1078,7 +1078,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { // Issue #3818 public void testHasChildQueryOnlyReturnsSingleChildType() { assertAcked(prepareCreate("grandissue") - .addMapping("grandparent", "name", "type=string") + .addMapping("grandparent", "name", "type=text") .addMapping("parent", "_parent", "type=grandparent") .addMapping("child_type_one", "_parent", "type=parent") .addMapping("child_type_two", "_parent", "type=parent")); @@ -1894,11 +1894,6 @@ public class ChildQuerySearchIT extends 
ESIntegTestCase { fail(); } catch (SearchPhaseExecutionException e) { } - - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.hasParentQuery("parent", matchAllQuery())) - .get(); - assertHitCount(response, 0); } static HasChildQueryBuilder hasChildQuery(String type, QueryBuilder queryBuilder) { diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 18a99411cf4..e96b4d69b00 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -638,7 +638,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { public void testParsingExceptionIfFieldDoesNotExist() throws Exception { assertAcked(prepareCreate("test").addMapping( "type", - jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "string") + jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("geo").field("type", "geo_point").endObject().endObject().endObject().endObject())); ensureYellow(); int numDocs = 2; diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 4ef80fcdf67..aae1f2f5e8b 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -405,7 +405,7 @@ public class QueryRescorerIT extends ESIntegTestCase { } } - // forces QUERY_THEN_FETCH because of https://github.com/elasticsearch/elasticsearch/issues/4829 + // forces QUERY_THEN_FETCH because of https://github.com/elastic/elasticsearch/issues/4829 public void testEquivalence() throws Exception { // no dummy docs since merges can change scores while we run queries. 
int numDocs = indexRandomNumbers("whitespace", -1, false); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index e2649b4ab94..9f898a47c06 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -113,55 +113,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(search, 0, "text", 0, equalTo("text")); } - public void testPlainHighlighterWithLongUnanalyzedStringTerm() throws IOException { - XContentBuilder mappings = jsonBuilder(); - mappings.startObject(); - mappings.startObject("type") - .startObject("properties") - .startObject("long_text") - .field("type", "string") - .field("analyzer", "keyword") - .field("index_options", "offsets") - .field("term_vector", "with_positions_offsets") - .field("ignore_above", 1) - .endObject() - .startObject("text") - .field("type", "text") - .field("analyzer", "keyword") - .field("index_options", "offsets") - .field("term_vector", "with_positions_offsets") - .endObject() - .endObject() - .endObject(); - mappings.endObject(); - assertAcked(prepareCreate("test") - .addMapping("type", mappings)); - ensureYellow(); - // crate a term that is larger than the allowed 32766, index it and then try highlight on it - // the search request should still succeed - StringBuilder builder = new StringBuilder(); - for (int i = 0; i < 32767; i++) { - builder.append('a'); - } - client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject().field("long_text", builder.toString()).field("text", "text").endObject()) - .get(); - refresh(); - String highlighter = randomFrom("plain", "postings", "fvh"); - SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))) - .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighter))).get(); - assertHighlight(search, 0, "text", 0, equalTo("text")); - search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))) - .highlighter(new HighlightBuilder().field(new Field("long_text").highlighterType(highlighter))).get(); - assertNoFailures(search); - assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); - - search = client().prepareSearch().setQuery(prefixQuery("text", "te")) - .highlighter(new HighlightBuilder().field(new Field("long_text").highlighterType(highlighter))).get(); - assertNoFailures(search); - assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); - } - public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); @@ -2526,7 +2477,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[COUNT]; for (int i = 0; i < COUNT; i++) { //generating text with word to highlight in a different position - //(https://github.com/elasticsearch/elasticsearch/issues/4103) + //(https://github.com/elastic/elasticsearch/issues/4103) String prefix = randomAsciiOfLengthBetween(5, 30); prefixes.put(String.valueOf(i), prefix); indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field1", "Sentence " + prefix diff --git a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java 
b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java index f5427ca6778..d93d5117274 100644 --- a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java +++ b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java @@ -315,7 +315,7 @@ public class MatchedQueriesIT extends ESIntegTestCase { } /** - * Test case for issue #4361: https://github.com/elasticsearch/elasticsearch/issues/4361 + * Test case for issue #4361: https://github.com/elastic/elasticsearch/issues/4361 */ public void testMatchedWithShould() throws Exception { createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 1f4919977bd..079363719f1 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -156,7 +156,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(client().prepareSearch().setQuery(queryStringQuery("")).get(), 0L); // return no docs } - // see https://github.com/elasticsearch/elasticsearch/issues/3177 + // see https://github.com/elastic/elasticsearch/issues/3177 public void testIssue3177() { createIndex("test"); client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 35608b9e38f..894f0fbe6b9 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -86,7 +86,7 @@ public class FieldSortIT extends ESIntegTestCase { return pluginList(InternalSettingsPlugin.class); } - @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/9421") + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/9421") public void testIssue8226() { int numIndices = between(5, 10); final boolean useMapping = randomBoolean(); @@ -180,41 +180,6 @@ public class FieldSortIT extends ESIntegTestCase { } - public void testIssue6639() throws ExecutionException, InterruptedException { - assertAcked(prepareCreate("$index") - .addMapping( - "$type", - "{\"$type\": " - + " {\"properties\": " - + " {\"grantee\": " - + " { \"index\": \"not_analyzed\", " - + " \"term_vector\": \"with_positions_offsets\", " - + " \"type\": \"string\", " - + " \"analyzer\": \"snowball\", " - + " \"boost\": 1.0, " - + " \"store\": true}}}}")); - indexRandom(true, - client().prepareIndex( - "$index", - "$type", - "data.activity.5").setSource("{\"django_ct\": \"data.activity\", \"grantee\": \"Grantee 1\"}"), - client().prepareIndex( - "$index", - "$type", - "data.activity.6").setSource("{\"django_ct\": \"data.activity\", \"grantee\": \"Grantee 2\"}")); - ensureYellow(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addSort("grantee", SortOrder.ASC) - .execute().actionGet(); - assertOrderedSearchHits(searchResponse, "data.activity.5", "data.activity.6"); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addSort("grantee", SortOrder.DESC) - .execute().actionGet(); - assertOrderedSearchHits(searchResponse, "data.activity.6", "data.activity.5"); - } - public void testTrackScores() throws Exception { createIndex("test"); ensureGreen(); @@ -928,7 +893,7 @@ public 
class FieldSortIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) - .addSort(SortBuilders.fieldSort("kkk").unmappedType("string")) + .addSort(SortBuilders.fieldSort("kkk").unmappedType("keyword")) .execute().actionGet(); assertNoFailures(searchResponse); } @@ -1417,7 +1382,7 @@ public class FieldSortIT extends ESIntegTestCase { } /** - * Test case for issue 6150: https://github.com/elasticsearch/elasticsearch/issues/6150 + * Test case for issue 6150: https://github.com/elastic/elasticsearch/issues/6150 */ public void testNestedSort() throws IOException, InterruptedException, ExecutionException { assertAcked(prepareCreate("test") diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java index 28994042084..d40cbf93002 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -341,7 +341,7 @@ public class GeoDistanceIT extends ESIntegTestCase { } // Regression bug: - // https://github.com/elasticsearch/elasticsearch/issues/2851 + // https://github.com/elastic/elasticsearch/issues/2851 public void testDistanceSortingWithMissingGeoPoint() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 42a3a8dc7c0..dc06c43cb85 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -280,7 +280,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { public void testCrossIndexIgnoreUnmapped() throws Exception { assertAcked(prepareCreate("test1").addMapping( - "type", "str_field1", "type=text", + "type", "str_field1", "type=keyword", "long_field", "type=long", "double_field", "type=double").get()); assertAcked(prepareCreate("test2").get()); @@ -292,8 +292,8 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { ensureYellow("test1", "test2"); SearchResponse resp = client().prepareSearch("test1", "test2") - .addSort(fieldSort("str_field").order(SortOrder.ASC).unmappedType("string")) - .addSort(fieldSort("str_field2").order(SortOrder.DESC).unmappedType("string")).get(); + .addSort(fieldSort("str_field").order(SortOrder.ASC).unmappedType("keyword")) + .addSort(fieldSort("str_field2").order(SortOrder.DESC).unmappedType("keyword")).get(); assertSortValues(resp, new Object[] {new Text("bcd"), null}, diff --git a/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java b/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java index 76086da4de5..33fcb55cada 100644 --- a/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/core/src/test/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -76,7 +76,7 @@ public class SourceFetchingIT extends ESIntegTestCase { /** * Test Case for #5132: Source filtering with wildcards broken when given multiple patterns - * https://github.com/elasticsearch/elasticsearch/issues/5132 + * https://github.com/elastic/elasticsearch/issues/5132 */ public void 
testSourceWithWildcardFiltering() { createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/search/stats/SearchStatsUnitTests.java b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsUnitTests.java index 76f20c85516..c423513ac72 100644 --- a/core/src/test/java/org/elasticsearch/search/stats/SearchStatsUnitTests.java +++ b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsUnitTests.java @@ -27,7 +27,7 @@ import java.util.HashMap; import java.util.Map; public class SearchStatsUnitTests extends ESTestCase { - // https://github.com/elasticsearch/elasticsearch/issues/7644 + // https://github.com/elastic/elasticsearch/issues/7644 public void testShardLevelSearchGroupStats() throws Exception { // let's create two dummy search stats with groups Map groupStats1 = new HashMap<>(); @@ -63,4 +63,4 @@ public class SearchStatsUnitTests extends ESTestCase { assertEquals(equalTo, stats.getScrollTimeInMillis()); assertEquals(equalTo, stats.getScrollCurrent()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 7739124b762..e3af1c296ba 100644 --- a/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -48,7 +48,7 @@ public class SharedSignificantTermsTestMethods { public static final String CLASS_FIELD = "class"; public static void aggregateAndCheckFromSeveralShards(ESIntegTestCase testCase) throws ExecutionException, InterruptedException { - String type = ESTestCase.randomBoolean() ? "string" : "long"; + String type = ESTestCase.randomBoolean() ? 
"text" : "long"; String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}"; index01Docs(type, settings, testCase); testCase.ensureGreen(); diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip b/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip index c0054fd3740..8568484235f 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip and b/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip b/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip index 169a0b71d75..394bbef103f 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip and b/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip b/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip index 44e78b4a905..a5e30e1d52e 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip and b/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.0.zip b/core/src/test/resources/indices/bwc/index-2.0.0.zip index b16a37fffcd..58a05e66179 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.0.zip and b/core/src/test/resources/indices/bwc/index-2.0.0.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.1.zip b/core/src/test/resources/indices/bwc/index-2.0.1.zip index 3b1f32195e7..eb5c3033b86 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.1.zip and b/core/src/test/resources/indices/bwc/index-2.0.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.2.zip b/core/src/test/resources/indices/bwc/index-2.0.2.zip index 447d37255d5..7394c8fd830 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.0.2.zip and b/core/src/test/resources/indices/bwc/index-2.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.0.zip b/core/src/test/resources/indices/bwc/index-2.1.0.zip index 23cc65b4ab5..ebf72f047c0 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.1.0.zip and b/core/src/test/resources/indices/bwc/index-2.1.0.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.1.zip b/core/src/test/resources/indices/bwc/index-2.1.1.zip index fa255dfce1a..a98ffb4cb4e 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.1.1.zip and b/core/src/test/resources/indices/bwc/index-2.1.1.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.1.2.zip b/core/src/test/resources/indices/bwc/index-2.1.2.zip index 739c104a236..8488fb2a2bc 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.1.2.zip and b/core/src/test/resources/indices/bwc/index-2.1.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.2.0.zip b/core/src/test/resources/indices/bwc/index-2.2.0.zip index f9011ffce07..b645084eeef 100644 Binary files a/core/src/test/resources/indices/bwc/index-2.2.0.zip and b/core/src/test/resources/indices/bwc/index-2.2.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip b/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip index f57314c601c..707c7b9da4e 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip and b/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip 
b/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip index 1eb19f1d61e..d01c151d79b 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip and b/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip b/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip index b15cedf4d66..b66a72975a7 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip and b/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0.zip b/core/src/test/resources/indices/bwc/repo-2.0.0.zip index 60b01723f29..6de1513fe5c 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.0.zip and b/core/src/test/resources/indices/bwc/repo-2.0.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.1.zip b/core/src/test/resources/indices/bwc/repo-2.0.1.zip index 44701b8df59..e9d9cc8a704 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.1.zip and b/core/src/test/resources/indices/bwc/repo-2.0.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.0.2.zip b/core/src/test/resources/indices/bwc/repo-2.0.2.zip index eab763106f7..08888ff3ab9 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.0.2.zip and b/core/src/test/resources/indices/bwc/repo-2.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.0.zip b/core/src/test/resources/indices/bwc/repo-2.1.0.zip index 1165341de36..3b4bf2718e7 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.1.0.zip and b/core/src/test/resources/indices/bwc/repo-2.1.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.1.zip b/core/src/test/resources/indices/bwc/repo-2.1.1.zip index 2b5bce15bc7..76ac5b96454 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.1.1.zip and b/core/src/test/resources/indices/bwc/repo-2.1.1.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.1.2.zip b/core/src/test/resources/indices/bwc/repo-2.1.2.zip index a89507f0042..460a69c69cf 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.1.2.zip and b/core/src/test/resources/indices/bwc/repo-2.1.2.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.2.0.zip b/core/src/test/resources/indices/bwc/repo-2.2.0.zip index 90f8282f46a..f895e11fdd7 100644 Binary files a/core/src/test/resources/indices/bwc/repo-2.2.0.zip and b/core/src/test/resources/indices/bwc/repo-2.2.0.zip differ diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json index 7993934c34d..dd544527f52 100644 --- a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json +++ b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json @@ -6,12 +6,12 @@ "match":"multi*", "mapping":{ "type":"{dynamic_type}", - "index":"analyzed", + "index":true, "store":true, "fields":{ "org":{ - "type":"{dynamic_type}", - "index":"not_analyzed", + "type":"keyword", + "index":true, "store":true } } diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index da3aa6c8f4a..c8ade7f866a 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -247,6 +247,15 @@ def generate_index(client, version, index_name): } } + mappings['doc'] = { + 'properties': { + 'string': { + 
'type': 'string', + 'boost': 4 + } + } + } + settings = { 'number_of_shards': 1, 'number_of_replicas': 0, diff --git a/docs/perl/index.asciidoc b/docs/perl/index.asciidoc index 39410ab2307..0e3e9002eb1 100644 --- a/docs/perl/index.asciidoc +++ b/docs/perl/index.asciidoc @@ -104,21 +104,21 @@ please use `Search::Elasticsearch::Client::0_90::Direct` as follows: == Reporting issues -The GitHub repository is http://github.com/elasticsearch/elasticsearch-perl +The GitHub repository is https://github.com/elastic/elasticsearch-perl and any issues can be reported on the issues list at -http://github.com/elasticsearch/elasticsearch-perl/issues. +https://github.com/elastic/elasticsearch-perl/issues. == Contributing Open source contributions are welcome. Please read our -https://github.com/elasticsearch/elasticsearch-perl/blob/master/CONTRIBUTING.asciidoc[guide to contributing]. +https://github.com/elastic/elasticsearch-perl/blob/master/CONTRIBUTING.asciidoc[guide to contributing]. == Copyright and License This software is Copyright (c) 2013-2015 by Elasticsearch BV. This is free software, licensed under: -https://github.com/elasticsearch/elasticsearch-perl/blob/master/LICENSE.txt[The Apache License Version 2.0]. +https://github.com/elastic/elasticsearch-perl/blob/master/LICENSE.txt[The Apache License Version 2.0]. diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc index a24ad00f226..8230ab76de9 100644 --- a/docs/plugins/integrations.asciidoc +++ b/docs/plugins/integrations.asciidoc @@ -76,13 +76,13 @@ releases 2.0 and later do not support rivers. [float] ==== Supported by Elasticsearch: -* https://github.com/elasticsearch/puppet-elasticsearch[Puppet]: +* https://github.com/elastic/puppet-elasticsearch[Puppet]: Elasticsearch puppet module. [float] ==== Supported by the community: -* http://github.com/elasticsearch/cookbook-elasticsearch[Chef]: +* https://github.com/elastic/cookbook-elasticsearch[Chef]: Chef cookbook for Elasticsearch This project appears to have been abandoned: diff --git a/docs/plugins/management.asciidoc b/docs/plugins/management.asciidoc index 623f0973f43..869f9210133 100644 --- a/docs/plugins/management.asciidoc +++ b/docs/plugins/management.asciidoc @@ -38,6 +38,7 @@ A number of plugins have been contributed by our community: * https://github.com/polyfractal/elasticsearch-inquisitor[Inquisitor Plugin] (by Zachary Tong) * https://github.com/lmenezes/elasticsearch-kopf[Kopf Plugin] (by lmenezes) * https://github.com/timschlechter/swagger-for-elasticsearch[Swagger for Elasticsearch] (by Tim Schlechter) +* https://github.com/Dynatrace/Dynatrace-Elasticsearch-Plugin[Elasticsearch monitoring plugin for Dynatrace Application Monitoring] (by Dynatrace) These community plugins appear to have been abandoned: diff --git a/docs/plugins/mapper-attachments.asciidoc b/docs/plugins/mapper-attachments.asciidoc index f90c1ec9716..ed992623a50 100644 --- a/docs/plugins/mapper-attachments.asciidoc +++ b/docs/plugins/mapper-attachments.asciidoc @@ -1,7 +1,7 @@ [[mapper-attachments]] === Mapper Attachments Plugin -deprecated[3.0.0,The `mapper-attachments` plugin has been replaced by the <> plugin] +deprecated[5.0.0,The `mapper-attachments` plugin has been replaced by the <> plugin] The mapper attachments plugin lets Elasticsearch index file attachments in common formats (such as PPT, XLS, PDF) using the Apache text extraction library http://lucene.apache.org/tika/[Tika]. 
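For context on the attachment field type described above: the plugin works by mapping a field with type `attachment` and feeding it base64-encoded file content at index time, after which Tika extracts the text and metadata for search. A minimal sketch of such a mapping, using a hypothetical `test` index, `person` type and `cv` field (the names and the exact request shape are assumptions for illustration, not taken from this patch):

    curl -XPUT 'localhost:9200/test/_mapping/person' -d '{
      "person": {
        "properties": {
          "cv": { "type": "attachment" }
        }
      }
    }'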
diff --git a/docs/plugins/mapper.asciidoc b/docs/plugins/mapper.asciidoc index 9d4fddc741b..50a21bcb345 100644 --- a/docs/plugins/mapper.asciidoc +++ b/docs/plugins/mapper.asciidoc @@ -10,7 +10,7 @@ The core mapper plugins are: <>:: -deprecated[3.0.0,The `mapper-attachments` plugin has been replaced by the <> plugin]: +deprecated[5.0.0,The `mapper-attachments` plugin has been replaced by the <> plugin]: The mapper-attachments integrates http://lucene.apache.org/tika/[Apache Tika] to provide a new field type `attachment` to allow indexing of documents such as PDFs and Microsoft Word. diff --git a/docs/reference/aggregations.asciidoc b/docs/reference/aggregations.asciidoc index b1b34ee66df..f3f24e8704c 100644 --- a/docs/reference/aggregations.asciidoc +++ b/docs/reference/aggregations.asciidoc @@ -11,7 +11,7 @@ the execution defines what this document set is (e.g. a top-level aggregation executes within the context of the executed query/filters of the search request). There are many different types of aggregations, each with its own purpose and output. To better understand these types, -it is often easier to break them into two main families: +it is often easier to break them into three main families: <>:: A family of aggregations that build buckets, where each bucket is associated with a _key_ and a document diff --git a/docs/reference/analysis/tokenfilters/phonetic-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/phonetic-tokenfilter.asciidoc index b7e9334db9d..45e7328580b 100644 --- a/docs/reference/analysis/tokenfilters/phonetic-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/phonetic-tokenfilter.asciidoc @@ -2,4 +2,4 @@ === Phonetic Token Filter The `phonetic` token filter is provided as a plugin and located -https://github.com/elasticsearch/elasticsearch-analysis-phonetic[here]. +https://github.com/elastic/elasticsearch-analysis-phonetic[here]. diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 07fdecf101c..07d40e4149a 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -107,7 +107,7 @@ they resolve to given the current time is 22nd March 2024 noon utc. | `` | `logstash-2024.03.01` | `` | `logstash-2024.03` | `` | `logstash-2024.02` -| `` | `logstash-2024.03.23` |====== To use the characters `{` and `}` in the static part of an index name template, escape them diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 25a4f34fa46..634bc23d6ac 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -248,7 +248,7 @@ return the full updated source. The update API uses Elasticsearch's versioning support internally to make sure the document doesn't change during the update. You can use the `version` -parameter to specify that the document should only be updated if it's version +parameter to specify that the document should only be updated if its version matches the one specified. By setting version type to `force` you can force the new version of the document after update (use with care! with `force` there is no guarantee the document didn't change). Version types `external` & diff --git a/docs/reference/indices/stats.asciidoc b/docs/reference/indices/stats.asciidoc index 98cdbbf94c7..87eda44e104 100644 --- a/docs/reference/indices/stats.asciidoc +++ b/docs/reference/indices/stats.asciidoc @@ -39,6 +39,8 @@ specified as well in the URI. Those stats can be any of: groups).
The `groups` parameter accepts a comma separated list of group names. Use `_all` to return statistics for all groups. +`segments`:: Retrieve the memory use of the open segments. Optionally, set the `include_segment_file_sizes` flag to also report the aggregated disk usage of each of the Lucene index files. + `completion`:: Completion suggest statistics. `fielddata`:: Fielddata statistics. `flush`:: Flush statistics. diff --git a/docs/reference/mapping/params/index-options.asciidoc b/docs/reference/mapping/params/index-options.asciidoc index 9e096604b76..f4608714258 100644 --- a/docs/reference/mapping/params/index-options.asciidoc +++ b/docs/reference/mapping/params/index-options.asciidoc @@ -29,7 +29,7 @@ following settings: Offsets are used by the <>. <> string fields use `positions` as the default, and -< "Quick brown fox" ------------------------------------ -Please see the full https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-model[documentation] +Please see the full https://github.com/elastic/elasticsearch-rails/tree/master/elasticsearch-model[documentation] for more information. diff --git a/docs/ruby/persistence.asciidoc b/docs/ruby/persistence.asciidoc index 1860f0167f3..7d361978ee7 100644 --- a/docs/ruby/persistence.asciidoc +++ b/docs/ruby/persistence.asciidoc @@ -80,11 +80,11 @@ The repository module provides a number of features and facilities to configure as well as support for extending your own, custom repository class. Please refer to the -https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-persistence#the-repository-pattern[documentation] +https://github.com/elastic/elasticsearch-rails/tree/master/elasticsearch-persistence#the-repository-pattern[documentation] for more information. Also, check out the -https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-persistence#example-application[example application] which demonstrates the usage patterns of the _repository_ approach to persistence. +https://github.com/elastic/elasticsearch-rails/tree/master/elasticsearch-persistence#example-application[example application] which demonstrates the usage patterns of the _repository_ approach to persistence. === Active Record @@ -219,8 +219,8 @@ article.save ------------------------------------ Please see the extensive documentation in the library -https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-persistence#the-activerecord-pattern[README] +https://github.com/elastic/elasticsearch-rails/tree/master/elasticsearch-persistence#the-activerecord-pattern[README] for detailed information. Also, check out the -https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-persistence#example-application-1[example application] which demonstrates the usage patterns of the _active record_ approach to persistence. +https://github.com/elastic/elasticsearch-rails/tree/master/elasticsearch-persistence#example-application-1[example application] which demonstrates the usage patterns of the _active record_ approach to persistence. diff --git a/docs/ruby/rails.asciidoc b/docs/ruby/rails.asciidoc index fdb4818011d..1fef3f42381 100644 --- a/docs/ruby/rails.asciidoc +++ b/docs/ruby/rails.asciidoc @@ -13,4 +13,4 @@ provides features suitable for Ruby on Rails applications. You can generate a fully working example Ruby on Rails application with templates provided.
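A minimal sketch of the REST call for the `include_segment_file_sizes` flag documented in the stats hunk above, assuming the flag is exposed as a query parameter of the same name and using a hypothetical index `test`:

    curl -XGET 'localhost:9200/test/_stats/segments?include_segment_file_sizes=true'

When set, the `segments` section of the response additionally reports the aggregated on-disk size of each kind of Lucene index file, as the new doc line describes.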
-Please refer to the https://github.com/elasticsearch/elasticsearch-rails/tree/master/elasticsearch-rails[documentation] for more information. +Please refer to the https://github.com/elastic/elasticsearch-rails/tree/master/elasticsearch-rails[documentation] for more information. diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java index 0551a4ea3f3..40631e8b535 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java @@ -404,7 +404,7 @@ public class BulkTests extends ESIntegTestCase { } /* - Test for https://github.com/elasticsearch/elasticsearch/issues/3444 + Test for https://github.com/elastic/elasticsearch/issues/3444 */ public void testBulkUpdateDocAsUpsertWithParent() throws Exception { client().admin().indices().prepareCreate("test") @@ -441,7 +441,7 @@ public class BulkTests extends ESIntegTestCase { } /* - Test for https://github.com/elasticsearch/elasticsearch/issues/3444 + Test for https://github.com/elastic/elasticsearch/issues/3444 */ public void testBulkUpdateUpsertWithParent() throws Exception { assertAcked(prepareCreate("test") @@ -475,7 +475,7 @@ public class BulkTests extends ESIntegTestCase { } /* - * Test for https://github.com/elasticsearch/elasticsearch/issues/8365 + * Test for https://github.com/elastic/elasticsearch/issues/8365 */ public void testBulkUpdateChildMissingParentRouting() throws Exception { assertAcked(prepareCreate("test").addMapping("parent", "{\"parent\":{}}").addMapping("child", diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java index 4c034b9dcbb..97447cf2df3 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java @@ -69,10 +69,10 @@ public class CardinalityTests extends ESIntegTestCase { prepareCreate("idx").addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("str_value") - .field("type", "string") + .field("type", "keyword") .endObject() .startObject("str_values") - .field("type", "string") + .field("type", "keyword") .endObject() .startObject("l_value") .field("type", "long") diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index bf67d8bc8e8..2fb0f9f6327 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -344,7 +344,7 @@ public class EquivalenceTests extends ESIntegTestCase { assertEquals(numDocs, response.getHits().getTotalHits()); } - // https://github.com/elasticsearch/elasticsearch/issues/6435 + // https://github.com/elastic/elasticsearch/issues/6435 public void testReduce() throws Exception { createIndex("idx"); final int value = randomIntBetween(0, 10); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java index a092d35769d..4de3d448d99 100644 --- 
a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java @@ -136,7 +136,7 @@ public class IndexLookupTests extends ESIntegTestCase { expectedEndOffsetsArray.put("3", ends3); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("int_payload_field").field("type", "string").field("index_options", "offsets") + .startObject("int_payload_field").field("type", "text").field("index_options", "offsets") .field("analyzer", "payload_int").endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings( Settings.settingsBuilder() @@ -399,10 +399,10 @@ public class IndexLookupTests extends ESIntegTestCase { public void testAllExceptPosAndOffset() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("float_payload_field").field("type", "string").field("index_options", "offsets").field("term_vector", "no") - .field("analyzer", "payload_float").endObject().startObject("string_payload_field").field("type", "string") + .startObject("float_payload_field").field("type", "text").field("index_options", "offsets").field("term_vector", "no") + .field("analyzer", "payload_float").endObject().startObject("string_payload_field").field("type", "text") .field("index_options", "offsets").field("term_vector", "no").field("analyzer", "payload_string").endObject() - .startObject("int_payload_field").field("type", "string").field("index_options", "offsets") + .startObject("int_payload_field").field("type", "text").field("index_options", "offsets") .field("analyzer", "payload_int").endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings( Settings.settingsBuilder() diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 709308994f9..60b9460bb12 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -552,7 +552,7 @@ public class IndicesRequestTests extends ESIntegTestCase { public void testPutMapping() { interceptTransportActions(PutMappingAction.NAME); - PutMappingRequest putMappingRequest = new PutMappingRequest(randomUniqueIndicesOrAliases()).type("type").source("field", "type=string"); + PutMappingRequest putMappingRequest = new PutMappingRequest(randomUniqueIndicesOrAliases()).type("type").source("field", "type=text"); internalCluster().clientNodeClient().admin().indices().putMapping(putMappingRequest).actionGet(); clearInterceptedActions(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java index 7a6ffa5edf0..67f7d6ff0da 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java @@ -123,7 +123,7 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { } public void testScoreAccessWithinScript() throws Exception { 
- assertAcked(prepareCreate("test").addMapping("type", "body", "type=string", "index", + assertAcked(prepareCreate("test").addMapping("type", "body", "type=text", "index", "type=" + randomFrom("short", "float", "long", "integer", "double"))); ensureYellow(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 1b58bde91c8..9d31274be81 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -87,9 +87,9 @@ public class SearchFieldsTests extends ESIntegTestCase { // _timestamp is randomly enabled via templates but we don't want it here to test stored fields behaviour .startObject("_timestamp").field("enabled", false).endObject() .startObject("properties") - .startObject("field1").field("type", "string").field("store", true).endObject() - .startObject("field2").field("type", "string").field("store", false).endObject() - .startObject("field3").field("type", "string").field("store", true).endObject() + .startObject("field1").field("type", "text").field("store", true).endObject() + .startObject("field2").field("type", "text").field("store", false).endObject() + .startObject("field3").field("type", "text").field("store", true).endObject() .endObject().endObject().endObject().string(); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); @@ -487,7 +487,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .startObject("field1").field("type", "object").startObject("properties") .startObject("field2").field("type", "object").startObject("properties") .startObject("field3").field("type", "object").startObject("properties") - .startObject("field4").field("type", "string").field("store", true) + .startObject("field4").field("type", "text").field("store", true) .endObject().endObject() .endObject().endObject() .endObject().endObject() @@ -554,7 +554,8 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") - .startObject("string_field").field("type", "string").endObject() + .startObject("text_field").field("type", "text").endObject() + .startObject("keyword_field").field("type", "keyword").endObject() .startObject("byte_field").field("type", "byte").endObject() .startObject("short_field").field("type", "short").endObject() .startObject("integer_field").field("type", "integer").endObject() @@ -569,7 +570,8 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() - .field("string_field", "foo") + .field("text_field", "foo") + .field("keyword_field", "foo") .field("byte_field", (byte) 1) .field("short_field", (short) 2) .field("integer_field", 3) @@ -583,7 +585,8 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery()) - 
.addFieldDataField("string_field") + .addFieldDataField("text_field") + .addFieldDataField("keyword_field") .addFieldDataField("byte_field") .addFieldDataField("short_field") .addFieldDataField("integer_field") @@ -599,7 +602,7 @@ public class SearchFieldsTests extends ESIntegTestCase { Set fields = new HashSet<>(searchResponse.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field", - "float_field", "double_field", "date_field", "boolean_field", "string_field"))); + "float_field", "double_field", "date_field", "boolean_field", "text_field", "keyword_field"))); assertThat(searchResponse.getHits().getAt(0).fields().get("byte_field").value().toString(), equalTo("1")); assertThat(searchResponse.getHits().getAt(0).fields().get("short_field").value().toString(), equalTo("2")); @@ -609,7 +612,8 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().get("double_field").value(), equalTo((Object) 6.0d)); assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) 1332374400000L)); assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value(), equalTo((Object) 1L)); - + assertThat(searchResponse.getHits().getAt(0).fields().get("text_field").value(), equalTo("foo")); + assertThat(searchResponse.getHits().getAt(0).fields().get("keyword_field").value(), equalTo("foo")); } public void testScriptFields() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index 57bfd3fe10d..8cb8ebcc0a4 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -138,7 +138,7 @@ public class SimpleSortTests extends ESIntegTestCase { String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("lvalue").field("type", "long").endObject() .startObject("dvalue").field("type", "double").endObject() - .startObject("svalue").field("type", "string").endObject() + .startObject("svalue").field("type", "keyword").endObject() .startObject("gvalue").field("type", "geo_point").endObject() .endObject().endObject().endObject().string(); assertAcked(prepareCreate("test").addMapping("type1", mapping)); @@ -317,7 +317,7 @@ public class SimpleSortTests extends ESIntegTestCase { public void test2920() throws IOException { assertAcked(prepareCreate("test").addMapping( "test", - jsonBuilder().startObject().startObject("test").startObject("properties").startObject("value").field("type", "string") + jsonBuilder().startObject().startObject("test").startObject("properties").startObject("value").field("type", "keyword") .endObject().endObject().endObject().endObject())); ensureGreen(); for (int i = 0; i < 10; i++) { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index 9f00f58ae99..f17bb7ea59f 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -124,7 +124,7 @@ public 
class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") - .startObject("text").field("type", "string").field("analyzer", "keyword").endObject() + .startObject("text").field("type", "text").field("analyzer", "keyword").endObject() .endObject() .endObject().endObject(); assertAcked(prepareCreate("test_2").addMapping("type1", mapping)); @@ -193,10 +193,10 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") - .field("type", "string") + .field("type", "text") .startObject("fields") .startObject("shingled") - .field("type", "string") + .field("type", "text") .field("analyzer", "biword") .field("search_analyzer", "standard") .endObject() @@ -264,10 +264,10 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") - .field("type", "string") + .field("type", "text") .startObject("fields") .startObject("shingled") - .field("type", "string") + .field("type", "text") .field("analyzer", "biword") .field("search_analyzer", "standard") .endObject() @@ -426,7 +426,7 @@ public class SuggestSearchTests extends ESIntegTestCase { // see #2817 public void testStopwordsOnlyPhraseSuggest() throws IOException { - assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=string,analyzer=stopwd").setSettings( + assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings( settingsBuilder() .put("index.analysis.analyzer.stopwd.tokenizer", "whitespace") .putArray("index.analysis.analyzer.stopwd.filter", "stop") @@ -458,9 +458,9 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject() .startObject("properties") - .startObject("body").field("type", "string").field("analyzer", "body").endObject() - .startObject("body_reverse").field("type", "string").field("analyzer", "reverse").endObject() - .startObject("bigram").field("type", "string").field("analyzer", "bigram").endObject() + .startObject("body").field("type", "text").field("analyzer", "body").endObject() + .startObject("body_reverse").field("type", "text").field("analyzer", "reverse").endObject() + .startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject() .endObject() .endObject().endObject(); assertAcked(builder.addMapping("type1", mapping)); @@ -505,15 +505,15 @@ public class SuggestSearchTests extends ESIntegTestCase { .endObject() .startObject("properties") .startObject("body"). - field("type", "string"). + field("type", "text"). field("analyzer", "body") .endObject() .startObject("body_reverse"). - field("type", "string"). + field("type", "text"). field("analyzer", "reverse") .endObject() .startObject("bigram"). - field("type", "string"). + field("type", "text"). 
field("analyzer", "bigram") .endObject() .endObject() @@ -640,15 +640,15 @@ public class SuggestSearchTests extends ESIntegTestCase { .endObject() .startObject("properties") .startObject("body") - .field("type", "string") + .field("type", "text") .field("analyzer", "body") .endObject() .startObject("body_reverse") - .field("type", "string") + .field("type", "text") .field("analyzer", "reverse") .endObject() .startObject("bigram") - .field("type", "string") + .field("type", "text") .field("analyzer", "bigram") .endObject() .endObject() @@ -707,9 +707,9 @@ public class SuggestSearchTests extends ESIntegTestCase { .startObject().startObject("type1") .startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject() .startObject("properties") - .startObject("body").field("type", "string").field("analyzer", "body").endObject() - .startObject("bigram").field("type", "string").field("analyzer", "bigram").endObject() - .startObject("ngram").field("type", "string").field("analyzer", "ngram").endObject() + .startObject("body").field("type", "text").field("analyzer", "body").endObject() + .startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject() + .startObject("ngram").field("type", "text").field("analyzer", "ngram").endObject() .endObject() .endObject().endObject(); assertAcked(builder.addMapping("type1", mapping)); @@ -802,7 +802,7 @@ public class SuggestSearchTests extends ESIntegTestCase { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") .startObject("name") - .field("type", "string") + .field("type", "text") .field("analyzer", "suggest") .endObject() .endObject() @@ -844,7 +844,7 @@ public class SuggestSearchTests extends ESIntegTestCase { startObject("type1"). startObject("properties"). startObject("name"). - field("type", "string"). + field("type", "text"). field("analyzer", "suggest"). endObject(). endObject(). 
@@ -903,7 +903,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .endObject() .startObject("properties") .startObject("body") - .field("type", "string") + .field("type", "text") .field("analyzer", "body") .endObject() .endObject() @@ -961,7 +961,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .startObject("type1") .startObject("properties") .startObject("title") - .field("type", "string") + .field("type", "text") .field("analyzer", "text") .endObject() .endObject() @@ -1106,7 +1106,7 @@ public class SuggestSearchTests extends ESIntegTestCase { .startObject("type1") .startObject("properties") .startObject("title") - .field("type", "string") + .field("type", "text") .field("analyzer", "text") .endObject() .endObject() diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml index 205070be13e..da9c417ffeb 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml @@ -48,7 +48,6 @@ msearch: body: - index: test_1 - - query: - { "template": { "query": { "term": { "foo": { "value": "{{template}}" } } }, "params": { "template": "bar" } } } + - query: { "template": { "query": { "term": { "foo": { "value": "{{template}}" } } }, "params": { "template": "bar" } } } - match: { responses.0.hits.total: 1 } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java index 9c080d85f57..e49afafca46 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryBasicTests.java @@ -94,7 +94,7 @@ public class UpdateByQueryBasicTests extends UpdateByQueryTestCase { // Now make foo searchable assertAcked(client().admin().indices().preparePutMapping("test").setType("test") - .setSource("{\"test\": {\"properties\":{\"foo\": {\"type\": \"string\"}}}}")); + .setSource("{\"test\": {\"properties\":{\"foo\": {\"type\": \"text\"}}}}")); UpdateByQueryRequestBuilder update = request().source("test"); if (refresh != null) { update.refresh(refresh); diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index 072c7ce7813..c23e5da95a1 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -58,6 +58,9 @@ --- "wait_for_completion=false": + - skip: + version: "0.0.0 - " + reason: breaks other tests by leaving a running reindex behind - do: index: index: source diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/30_new_fields.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/30_new_fields.yaml index 269450b3dba..22612b7f71b 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/30_new_fields.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update-by-query/30_new_fields.yaml @@ -8,7 +8,7 @@ place: properties: name: - type: string + type: text - do: cluster.health: wait_for_status: yellow @@ -27,10 +27,10 
@@ body: properties: name: - type: string + type: text fields: english: - type: string + type: text analyzer: english - do: diff --git a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml index c612ade421b..7fe93458af9 100644 --- a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml +++ b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml @@ -22,7 +22,7 @@ type: properties: text: - type: string + type: text analyzer: my_analyzer - do: cluster.health: diff --git a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml index a4bc5020604..77d2aaef80d 100644 --- a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml +++ b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml @@ -10,7 +10,7 @@ type: properties: text: - type: string + type: text analyzer: kuromoji - do: cluster.health: diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml index a32eed25df2..fa57d916fc0 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml @@ -22,7 +22,7 @@ type: properties: text: - type: string + type: text analyzer: my_analyzer - do: cluster.health: diff --git a/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml b/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml index 9e4c81fcc8f..5aec77f8ce2 100644 --- a/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml +++ b/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml @@ -10,7 +10,7 @@ type: properties: text: - type: string + type: text analyzer: smartcn - do: cluster.health: diff --git a/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml b/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml index eccbcf4c1bd..f699440cb4f 100644 --- a/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml +++ b/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml @@ -10,7 +10,7 @@ type: properties: text: - type: string + type: text analyzer: polish - do: cluster.health: diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java index 0f2d96573ef..57a6482f676 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java @@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.nullValue; /** * Reported issue in #15 - * (https://github.com/elasticsearch/elasticsearch-cloud-azure/issues/15) + * 
(https://github.com/elastic/elasticsearch-cloud-azure/issues/15) */ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0, diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index 738befe9d16..d162ad8894c 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -146,7 +146,7 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas logger.trace("gce instance {} with status {} found.", name, status); // We don't want to connect to TERMINATED status instances - // See https://github.com/elasticsearch/elasticsearch-cloud-gce/issues/3 + // See https://github.com/elastic/elasticsearch-cloud-gce/issues/3 if (Status.TERMINATED.equals(status)) { logger.debug("node {} is TERMINATED. Ignoring", name); continue; diff --git a/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml b/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml index 6e6266ee9c9..fc5dcba8bcb 100644 --- a/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml +++ b/plugins/lang-python/src/test/resources/rest-api-spec/test/lang_python/30_update.yaml @@ -1,7 +1,7 @@ # Integration tests for Lang Python components # -# Test case for #4: https://github.com/elasticsearch/elasticsearch-lang-python/issues/4 +# Test case for #4: https://github.com/elastic/elasticsearch-lang-python/issues/4 # Update request that uses python script with no parameters fails with NullPointerException # "Python Update With Empty Parameters": @@ -30,7 +30,7 @@ --- -# Test case for #19: https://github.com/elasticsearch/elasticsearch-lang-python/issues/19 +# Test case for #19: https://github.com/elastic/elasticsearch-lang-python/issues/19 # Multi-line or multi-statement Python scripts raise NullPointerException # "Python Update With Multi Lines": diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 1635175a528..dd721f77ea0 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -299,7 +299,7 @@ public class AttachmentMapper extends FieldMapper { if (typeNode != null) { type = typeNode.toString(); } else { - type = "string"; + type = "text"; } Mapper.TypeParser typeParser = parserContext.typeParser(type); Mapper.Builder mapperBuilder = typeParser.parse(propName, propNode, parserContext); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java index 21627daeb53..a10910cf62a 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.nullValue; import static 
org.hamcrest.Matchers.startsWith; /** - * Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/18 + * Test for https://github.com/elastic/elasticsearch-mapper-attachments/issues/18 * Note that we have converted /org/elasticsearch/index/mapper/xcontent/testContentLength.txt * to a /org/elasticsearch/index/mapper/xcontent/encrypted.pdf with password `12345678`. */ diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java index b44a6d55eb9..cb8f6746a84 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java @@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; /** - * Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/38 + * Test for https://github.com/elastic/elasticsearch-mapper-attachments/issues/38 */ public class MetadataMapperTests extends AttachmentUnitTestCase { diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java index 9475c85a5f4..17e4ec6818a 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java @@ -63,7 +63,7 @@ public class VariousDocTests extends AttachmentUnitTestCase { } /** - * Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/104 + * Test for https://github.com/elastic/elasticsearch-mapper-attachments/issues/104 */ public void testWordDocxDocument104() throws Exception { assertParseable("issue-104.docx"); @@ -112,7 +112,7 @@ public class VariousDocTests extends AttachmentUnitTestCase { /** * Test for ASCIIDOC - * Not yet supported by Tika: https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/29 + * Not yet supported by Tika: https://github.com/elastic/elasticsearch-mapper-attachments/issues/29 */ public void testAsciidocDocument() throws Exception { assertParseable("asciidoc.asciidoc"); diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml index 8b2c4b0ea18..3f3754109ee 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml @@ -2,7 +2,7 @@ # --- -# https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/23 +# https://github.com/elastic/elasticsearch-mapper-attachments/issues/23 "Index empty attachment": - do: @@ -28,7 +28,7 @@ file: { } --- -# https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/18 +# https://github.com/elastic/elasticsearch-mapper-attachments/issues/18 # Encoded content with https://www.base64encode.org/ # File1 # TIMEOUT_SETTING = Setting.affixKeySetting( + TIMEOUT_KEY, + (s) -> Storage.TIMEOUT_SETTING.get(s).toString(), + (s) -> Setting.parseTimeValue(s, TimeValue.timeValueSeconds(-1), 
TIMEOUT_KEY.toString()), + false, + Setting.Scope.CLUSTER); + private static final Setting<String> ACCOUNT_SETTING = Setting.adfixKeySetting(Storage.PREFIX, ACCOUNT_SUFFIX, "", Function.identity(), false, Setting.Scope.CLUSTER); + private static final Setting<String> KEY_SETTING = Setting.adfixKeySetting(Storage.PREFIX, KEY_SUFFIX, "", Function.identity(), false, Setting.Scope.CLUSTER); + private static final Setting<Boolean> DEFAULT_SETTING = Setting.adfixKeySetting(Storage.PREFIX, DEFAULT_SUFFIX, "false", Boolean::valueOf, false, Setting.Scope.CLUSTER); -public class AzureStorageSettings { - private static ESLogger logger = ESLoggerFactory.getLogger(AzureStorageSettings.class.getName()); private final String name; private final String account; private final String key; private final TimeValue timeout; + private final boolean activeByDefault; - public AzureStorageSettings(String name, String account, String key, TimeValue timeout) { + public AzureStorageSettings(String name, String account, String key, TimeValue timeout, boolean activeByDefault) { this.name = name; this.account = account; this.key = key; this.timeout = timeout; + this.activeByDefault = activeByDefault; } public String getName() { @@ -62,12 +83,17 @@ public class AzureStorageSettings { return timeout; } + public boolean isActiveByDefault() { + return activeByDefault; + } + @Override public String toString() { final StringBuilder sb = new StringBuilder("AzureStorageSettings{"); sb.append("name='").append(name).append('\''); sb.append(", account='").append(account).append('\''); sb.append(", key='").append(key).append('\''); + sb.append(", activeByDefault='").append(activeByDefault).append('\''); sb.append(", timeout=").append(timeout); sb.append('}'); return sb.toString(); @@ -79,49 +105,70 @@ public class AzureStorageSettings { * @return A tuple with v1 = primary storage and v2 = secondary storage */ public static Tuple<AzureStorageSettings, Map<String, AzureStorageSettings>> parse(Settings settings) { - AzureStorageSettings primaryStorage = null; - Map<String, AzureStorageSettings> secondaryStorage = new HashMap<>(); + List<AzureStorageSettings> storageSettings = createStorageSettings(settings); + return Tuple.tuple(getPrimary(storageSettings), getSecondaries(storageSettings)); } - TimeValue globalTimeout = Storage.TIMEOUT_SETTING.get(settings); + private static List<AzureStorageSettings> createStorageSettings(Settings settings) { + Setting<Settings> storageGroupSetting = Setting.groupSetting(Storage.PREFIX, false, Setting.Scope.CLUSTER); + // ignore global timeout which has the same prefix but does not belong to any group + Settings groups = storageGroupSetting.get(settings.filter((k) -> k.equals(Storage.TIMEOUT_SETTING.getKey()) == false)); + List<AzureStorageSettings> storageSettings = new ArrayList<>(); + for (String groupName : groups.getAsGroups().keySet()) { + storageSettings.add( + new AzureStorageSettings( + groupName, + getValue(settings, groupName, ACCOUNT_SETTING), + getValue(settings, groupName, KEY_SETTING), + getValue(settings, groupName, TIMEOUT_SETTING), + getValue(settings, groupName, DEFAULT_SETTING)) + ); + } + return storageSettings; + } - Settings storageSettings = settings.getByPrefix(Storage.PREFIX); - if (storageSettings != null) { - Map<String, Object> asMap = storageSettings.getAsStructuredMap(); - for (Map.Entry<String, Object> storage : asMap.entrySet()) { - if (storage.getValue() instanceof Map) { - @SuppressWarnings("unchecked") - Map<String, String> map = (Map<String, String>) storage.getValue(); - TimeValue timeout = TimeValue.parseTimeValue(map.get("timeout"), globalTimeout, Storage.PREFIX + storage.getKey() + ".timeout"); - AzureStorageSettings current = new AzureStorageSettings(storage.getKey(), map.get("account"), map.get("key"), 
timeout); - boolean activeByDefault = Boolean.parseBoolean(map.getOrDefault("default", "false")); - if (activeByDefault) { - if (primaryStorage == null) { - primaryStorage = current; - } else { - logger.warn("default storage settings has already been defined. You can not define it to [{}]", storage.getKey()); - secondaryStorage.put(storage.getKey(), current); - } + private static <T> T getValue(Settings settings, String groupName, Setting<T> setting) { + Setting.AffixKey k = (Setting.AffixKey) setting.getRawKey(); + String fullKey = k.toConcreteKey(groupName).toString(); + return setting.getConcreteSetting(fullKey).get(settings); + } + + private static AzureStorageSettings getPrimary(List<AzureStorageSettings> settings) { + if (settings.isEmpty()) { + return null; + } else if (settings.size() == 1) { + // the only storage settings belong (implicitly) to the default primary storage + AzureStorageSettings storage = settings.get(0); + return new AzureStorageSettings(storage.getName(), storage.getAccount(), storage.getKey(), storage.getTimeout(), true); + } else { + AzureStorageSettings primary = null; + for (AzureStorageSettings setting : settings) { + if (setting.isActiveByDefault()) { + if (primary == null) { + primary = setting; } else { - secondaryStorage.put(storage.getKey(), current); + throw new SettingsException("Multiple default Azure data stores configured: [" + primary.getName() + "] and [" + setting.getName() + "]"); } } } - // If we did not set any default storage, we should complain and define it - if (primaryStorage == null && secondaryStorage.isEmpty() == false) { - Map.Entry<String, AzureStorageSettings> fallback = secondaryStorage.entrySet().iterator().next(); - // We only warn if the number of secondary storage if > to 1 - // If the user defined only one storage account, that's fine. We know it's the default one. - if (secondaryStorage.size() > 1) { - logger.warn("no default storage settings has been defined. " + - "Add \"default\": true to the settings you want to activate by default. 
" + - "Forcing default to [{}].", fallback.getKey()); + if (primary == null) { + throw new SettingsException("No default Azure data store configured"); + } + return primary; + } + } + + private static Map getSecondaries(List settings) { + Map secondaries = new HashMap<>(); + // when only one setting is defined, we don't have secondaries + if (settings.size() > 1) { + for (AzureStorageSettings setting : settings) { + if (setting.isActiveByDefault() == false) { + secondaries.put(setting.getName(), setting); } - primaryStorage = fallback.getValue(); - secondaryStorage.remove(fallback.getKey()); } } - - return Tuple.tuple(primaryStorage, secondaryStorage); + return Collections.unmodifiableMap(secondaries); } public static T getValue(RepositorySettings repositorySettings, diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTests.java index 5347be09da0..ac2c51441f9 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSettingsParserTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cloud.azure.storage.AzureStorageSettings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import java.util.Map; @@ -73,14 +74,12 @@ public class AzureSettingsParserTests extends LuceneTestCase { .put("cloud.azure.storage.azure2.key", "mykey2") .build(); - Tuple> tuple = AzureStorageSettings.parse(settings); - assertThat(tuple.v1(), notNullValue()); - assertThat(tuple.v1().getAccount(), is("myaccount1")); - assertThat(tuple.v1().getKey(), is("mykey1")); - assertThat(tuple.v2().keySet(), hasSize(1)); - assertThat(tuple.v2().get("azure2"), notNullValue()); - assertThat(tuple.v2().get("azure2").getAccount(), is("myaccount2")); - assertThat(tuple.v2().get("azure2").getKey(), is("mykey2")); + try { + AzureStorageSettings.parse(settings); + fail("Should have failed with a SettingsException (no default data store)"); + } catch (SettingsException ex) { + assertEquals(ex.getMessage(), "No default Azure data store configured"); + } } public void testParseTwoSettingsTooManyDefaultSet() { @@ -93,14 +92,13 @@ public class AzureSettingsParserTests extends LuceneTestCase { .put("cloud.azure.storage.azure2.default", true) .build(); - Tuple> tuple = AzureStorageSettings.parse(settings); - assertThat(tuple.v1(), notNullValue()); - assertThat(tuple.v1().getAccount(), is("myaccount1")); - assertThat(tuple.v1().getKey(), is("mykey1")); - assertThat(tuple.v2().keySet(), hasSize(1)); - assertThat(tuple.v2().get("azure2"), notNullValue()); - assertThat(tuple.v2().get("azure2").getAccount(), is("myaccount2")); - assertThat(tuple.v2().get("azure2").getKey(), is("mykey2")); + try { + AzureStorageSettings.parse(settings); + fail("Should have failed with a SettingsException (multiple default data stores)"); + } catch (SettingsException ex) { + assertEquals(ex.getMessage(), "Multiple default Azure data stores configured: [azure1] and [azure2]"); + } + } public void testParseEmptySettings() { diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java 
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index 7f6f3106fc3..09c77da4633 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -174,7 +174,7 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa } /** - * For issue #51: https://github.com/elasticsearch/elasticsearch-cloud-azure/issues/51 + * For issue #51: https://github.com/elastic/elasticsearch-cloud-azure/issues/51 */ public void testMultipleSnapshots() throws URISyntaxException, StorageException { final String indexName = "test-idx-1"; @@ -298,7 +298,7 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa } /** - * For issue #26: https://github.com/elasticsearch/elasticsearch-cloud-azure/issues/26 + * For issue #26: https://github.com/elastic/elasticsearch-cloud-azure/issues/26 */ public void testListBlobs_26() throws StorageException, URISyntaxException { createIndex("test-idx-1", "test-idx-2", "test-idx-3"); @@ -356,7 +356,7 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa } /** - * For issue #28: https://github.com/elasticsearch/elasticsearch-cloud-azure/issues/28 + * For issue #28: https://github.com/elastic/elasticsearch-cloud-azure/issues/28 */ public void testGetDeleteNonExistingSnapshot_28() throws StorageException, URISyntaxException { ClusterAdminClient client = client().admin().cluster(); @@ -383,7 +383,7 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa } /** - * For issue #21: https://github.com/elasticsearch/elasticsearch-cloud-azure/issues/21 + * For issue #21: https://github.com/elastic/elasticsearch-cloud-azure/issues/21 */ public void testForbiddenContainerName() throws Exception { checkContainerName("", false); @@ -437,7 +437,7 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa } /** - * Test case for issue #23: https://github.com/elasticsearch/elasticsearch-cloud-azure/issues/23 + * Test case for issue #23: https://github.com/elastic/elasticsearch-cloud-azure/issues/23 */ public void testNonExistingRepo_23() { Client client = client(); diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml index 47559226275..aecb000a1ed 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml @@ -62,10 +62,10 @@ - match: { test_index.shards.0.type: SNAPSHOT } - match: { test_index.shards.0.stage: DONE } - - match: { test_index.shards.0.index.files.recovered: 1} - - gt: { test_index.shards.0.index.size.recovered_in_bytes: 0} - - match: { test_index.shards.0.index.files.reused: 0} - - match: { test_index.shards.0.index.size.reused_in_bytes: 0} + - match: { test_index.shards.0.index.files.recovered: 0} + - match: { test_index.shards.0.index.size.recovered_in_bytes: 0} + - match: { test_index.shards.0.index.files.reused: 1} + - gt: { test_index.shards.0.index.size.reused_in_bytes: 0} # Remove our snapshot - do: diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java 
b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index 8cc53d669b3..793e442e119 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -352,7 +352,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase } /** - * Test case for issue #86: https://github.com/elasticsearch/elasticsearch-cloud-aws/issues/86 + * Test case for issue #86: https://github.com/elastic/elasticsearch-cloud-aws/issues/86 */ public void testNonExistingRepo86() { Client client = client(); @@ -373,7 +373,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase } /** - * For issue #86: https://github.com/elasticsearch/elasticsearch-cloud-aws/issues/86 + * For issue #86: https://github.com/elastic/elasticsearch-cloud-aws/issues/86 */ public void testGetDeleteNonExistingSnapshot86() { ClusterAdminClient client = client().admin().cluster(); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilJNANativesTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilJNANativesTests.java new file mode 100644 index 00000000000..080eee2501b --- /dev/null +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilJNANativesTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.bootstrap; + +import org.apache.lucene.util.Constants; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.file.Files; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class EvilJNANativesTests extends ESTestCase { + + public void testSetMaximumNumberOfThreads() throws IOException { + if (Constants.LINUX) { + final List<String> lines = Files.readAllLines(PathUtils.get("/proc/self/limits")); + if (!lines.isEmpty()) { + for (String line : lines) { + if (line != null && line.startsWith("Max processes")) { + final String[] fields = line.split("\\s+"); + final long limit = Long.parseLong(fields[2]); + assertThat(JNANatives.MAX_NUMBER_OF_THREADS, equalTo(limit)); + return; + } + } + } + fail("should have read max processes from /proc/self/limits"); + } else { + assertThat(JNANatives.MAX_NUMBER_OF_THREADS, equalTo(-1L)); + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml index 8d1dee13893..c205e29ff6f 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodeattrs/10_basic.yaml @@ -20,12 +20,14 @@ "Test cat nodes attrs output": - do: cat.nodeattrs: {} - - match: # All attributes look good + # All attributes look good + - match: $body: | /^# node\s+ host\s+ ip\s+ attr\s+ value\s* \n (((\S+\s?){1,10})\s+(\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+(\S+)\s+ (\S+)\s* \n)+ $/ - - match: # A specific planted attribute is present and looks good + # A specific planted attribute is present and looks good + - match: $body: | /# node\s+ host\s+ ip\s+ attr\s+ value\s* \n (\S+(\s\S+){0,7})\s+ (\S+)\s+ (\d{1,3}\.){3}\d{1,3}\s+testattr\s+ test \s* \n @@ -37,12 +39,14 @@ - do: cat.nodeattrs: v: true - - match: # All attributes look good including the heading + # All attributes look good including the heading + - match: $body: | /^ node\s+ host\s+ ip\s+ attr\s+ value\s* \n (((\S+\s?){1,10})\s+(\S+)\s+(\d{1,3}\.){3}\d{1,3}\s+(\S+)\s+ (\S+)\s* \n)+ $/ - - match: # A specific planted attribute is present and looks good + # A specific planted attribute is present and looks good + - match: $body: | /# node\s+ host\s+ ip\s+ attr\s+ value\s* \n (\S+(\s\S+){0,7})\s+ (\S+)\s+ (\d{1,3}\.){3}\d{1,3}\s+ testattr\s+ test \s* \n @@ -52,12 +56,14 @@ cat.nodeattrs: h: attr,value v: true - - match: # All attributes look good + # All attributes look good + - match: $body: | /^ attr\s+ value\s*\n ((\S+)\s+ (\S+)\s*)+ $/ - - match: # A specific planted attribute is present and looks good + # A specific planted attribute is present and looks good + - match: $body: | /# attr\s+ value\s*\n testattr\s+ test\s*\n diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml index 165ed9f469c..7ffa0981eb6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml @@ -1,5 +1,15 @@ ---- -"Basic field stats": +setup: + - do: + indices.create: + index: test_1 + body: + mappings: + test: + properties: + foo: + type: text + number: + type: long - do: index: index: test_1 @@ -10,6 +20,8 @@ - do: indices.refresh: {} +--- +"Basic field stats": - do: field_stats: 
index: test_1 @@ -28,16 +40,6 @@ --- "Basic field stats with level set to indices": - - do: - index: - index: test_1 - type: test - id: id_1 - body: { foo: "bar", number: 123 } - - - do: - indices.refresh: {} - - do: field_stats: index: test_1 @@ -57,15 +59,6 @@ --- "Field stats with filtering": - - do: - index: - index: test_1 - type: test - id: id_1 - body: { foo: "bar", number: 123 } - - - do: - indices.refresh: {} - do: field_stats: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.restore/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.restore/10_basic.yaml index d471853a95b..a7330ac418c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.restore/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.restore/10_basic.yaml @@ -53,7 +53,7 @@ setup: - match: { test_index.shards.0.type: SNAPSHOT } - match: { test_index.shards.0.stage: DONE } - - match: { test_index.shards.0.index.files.recovered: 1} - - gt: { test_index.shards.0.index.size.recovered_in_bytes: 0} - - match: { test_index.shards.0.index.files.reused: 0} - - match: { test_index.shards.0.index.size.reused_in_bytes: 0} + - match: { test_index.shards.0.index.files.recovered: 0} + - match: { test_index.shards.0.index.size.recovered_in_bytes: 0} + - match: { test_index.shards.0.index.files.reused: 1} + - gt: { test_index.shards.0.index.size.reused_in_bytes: 0} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index e80bb93aeb7..4a20d3c3fd6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -651,4 +651,16 @@ public abstract class ESTestCase extends LuceneTestCase { } throw new AssertionFailedError("Expected exception " + expectedType.getSimpleName()); } + + protected static long spinForAtLeastOneMillisecond() { + long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS); + // force at least one millisecond to elapse, but ensure the + // clock has enough resolution to observe the passage of time + long start = System.nanoTime(); + long elapsed; + while ((elapsed = (System.nanoTime() - start)) < nanosecondsInMillisecond) { + // busy spin + } + return elapsed; + } }
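The spinForAtLeastOneMillisecond() helper added to ESTestCase busy-spins on System.nanoTime() until at least one millisecond has elapsed and returns the observed elapsed nanoseconds, so tests that assert on a measured duration never see a zero reading from a coarse clock. A hypothetical usage sketch, written as a fragment of an ESTestCase subclass (the test name and assertion message are illustrative, not part of the change):

    import java.util.concurrent.TimeUnit;

    public void testMeasuredDurationIsPositive() {
        // the helper guarantees at least one millisecond has elapsed
        long elapsedNanos = spinForAtLeastOneMillisecond();
        long elapsedMillis = TimeUnit.MILLISECONDS.convert(elapsedNanos, TimeUnit.NANOSECONDS);
        assertTrue("expected at least one millisecond to elapse", elapsedMillis >= 1);
    }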