diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 51349520810..d50f362a73e 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -10,6 +10,9 @@
+
+
+
@@ -21,7 +24,6 @@
-
@@ -301,7 +303,6 @@
-
@@ -468,7 +469,6 @@
-
@@ -903,8 +903,6 @@
-
-
@@ -992,7 +990,6 @@
-
diff --git a/core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java b/core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java
new file mode 100644
index 00000000000..37ab0a15391
--- /dev/null
+++ b/core/src/main/java/org/apache/logging/log4j/core/impl/ThrowableProxy.java
@@ -0,0 +1,665 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache license, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the license for the specific language governing permissions and
+ * limitations under the license.
+ */
+
+package org.apache.logging.log4j.core.impl;
+
+import java.io.Serializable;
+import java.net.URL;
+import java.security.CodeSource;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Stack;
+
+import org.apache.logging.log4j.core.util.Loader;
+import org.apache.logging.log4j.status.StatusLogger;
+import org.apache.logging.log4j.util.ReflectionUtil;
+import org.apache.logging.log4j.util.Strings;
+
+/**
+ * Wraps a Throwable to add packaging information about each stack trace element.
+ *
+ * <p>
+ * A proxy is used to represent a throwable that may not exist in a different class loader or JVM. When an application
+ * deserializes a ThrowableProxy, the throwable may not be set, but the throwable's information is preserved in other
+ * fields of the proxy like the message and stack trace.
+ * </p>
+ *
+ * <p>
+ * TODO: Move this class to org.apache.logging.log4j.core because it is used from LogEvent.
+ * </p>
+ * <p>
+ * TODO: Deserialize: Try to rebuild Throwable if the target exception is in this class loader?
+ * </p>
+ */
+public class ThrowableProxy implements Serializable {
+
+ private static final String CAUSED_BY_LABEL = "Caused by: ";
+ private static final String SUPPRESSED_LABEL = "Suppressed: ";
+ private static final String WRAPPED_BY_LABEL = "Wrapped by: ";
+
+ /**
+ * Cached StackTracePackageElement and ClassLoader.
+ * <p>
+ * Consider this class private.
+ * </p>
+ */
+ static class CacheEntry {
+ private final ExtendedClassInfo element;
+ private final ClassLoader loader;
+
+ public CacheEntry(final ExtendedClassInfo element, final ClassLoader loader) {
+ this.element = element;
+ this.loader = loader;
+ }
+ }
+
+ private static final ThrowableProxy[] EMPTY_THROWABLE_PROXY_ARRAY = new ThrowableProxy[0];
+
+ private static final char EOL = '\n';
+
+ private static final long serialVersionUID = -2752771578252251910L;
+
+ private final ThrowableProxy causeProxy;
+
+ private int commonElementCount;
+
+ private final ExtendedStackTraceElement[] extendedStackTrace;
+
+ private final String localizedMessage;
+
+ private final String message;
+
+ private final String name;
+
+ private final ThrowableProxy[] suppressedProxies;
+
+ private final transient Throwable throwable;
+
+ /**
+ * For JSON and XML IO via Jackson.
+ */
+ @SuppressWarnings("unused")
+ private ThrowableProxy() {
+ this.throwable = null;
+ this.name = null;
+ this.extendedStackTrace = null;
+ this.causeProxy = null;
+ this.message = null;
+ this.localizedMessage = null;
+ this.suppressedProxies = EMPTY_THROWABLE_PROXY_ARRAY;
+ }
+
+ /**
+ * Constructs the wrapper for the Throwable that includes packaging data.
+ *
+ * @param throwable
+ * The Throwable to wrap, must not be null.
+ */
+ public ThrowableProxy(final Throwable throwable) {
+ this(throwable, null);
+ }
+
+ /**
+ * Constructs the wrapper for the Throwable that includes packaging data.
+ *
+ * @param throwable
+ * The Throwable to wrap, must not be null.
+ * @param visited
+ * The set of visited suppressed exceptions.
+ */
+ private ThrowableProxy(final Throwable throwable, final Set<Throwable> visited) {
+ this.throwable = throwable;
+ this.name = throwable.getClass().getName();
+ this.message = throwable.getMessage();
+ this.localizedMessage = throwable.getLocalizedMessage();
+ final Map<String, CacheEntry> map = new HashMap<>();
+ final Stack<Class<?>> stack = ReflectionUtil.getCurrentStackTrace();
+ this.extendedStackTrace = this.toExtendedStackTrace(stack, map, null, throwable.getStackTrace());
+ final Throwable throwableCause = throwable.getCause();
+ final Set causeVisited = new HashSet<>(1);
+ this.causeProxy = throwableCause == null ? null : new ThrowableProxy(throwable, stack, map, throwableCause, visited, causeVisited);
+ this.suppressedProxies = this.toSuppressedProxies(throwable, visited);
+ }
+
+ /**
+ * Constructs the wrapper for a Throwable that is referenced as the cause by another Throwable.
+ *
+ * @param parent
+ * The Throwable referencing this Throwable.
+ * @param stack
+ * The Class stack.
+ * @param map
+ * The cache containing the packaging data.
+ * @param cause
+ * The Throwable to wrap.
+ * @param suppressedVisited TODO
+ * @param causeVisited TODO
+ */
+ private ThrowableProxy(final Throwable parent, final Stack<Class<?>> stack, final Map<String, CacheEntry> map,
+ final Throwable cause, final Set<Throwable> suppressedVisited, final Set<Throwable> causeVisited) {
+ causeVisited.add(cause);
+ this.throwable = cause;
+ this.name = cause.getClass().getName();
+ this.message = this.throwable.getMessage();
+ this.localizedMessage = this.throwable.getLocalizedMessage();
+ this.extendedStackTrace = this.toExtendedStackTrace(stack, map, parent.getStackTrace(), cause.getStackTrace());
+ final Throwable causeCause = cause.getCause();
+ this.causeProxy = causeCause == null || causeVisited.contains(causeCause) ? null : new ThrowableProxy(parent,
+ stack, map, causeCause, suppressedVisited, causeVisited);
+ this.suppressedProxies = this.toSuppressedProxies(cause, suppressedVisited);
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (this.getClass() != obj.getClass()) {
+ return false;
+ }
+ final ThrowableProxy other = (ThrowableProxy) obj;
+ if (this.causeProxy == null) {
+ if (other.causeProxy != null) {
+ return false;
+ }
+ } else if (!this.causeProxy.equals(other.causeProxy)) {
+ return false;
+ }
+ if (this.commonElementCount != other.commonElementCount) {
+ return false;
+ }
+ if (this.name == null) {
+ if (other.name != null) {
+ return false;
+ }
+ } else if (!this.name.equals(other.name)) {
+ return false;
+ }
+ if (!Arrays.equals(this.extendedStackTrace, other.extendedStackTrace)) {
+ return false;
+ }
+ if (!Arrays.equals(this.suppressedProxies, other.suppressedProxies)) {
+ return false;
+ }
+ return true;
+ }
+
+ private void formatCause(final StringBuilder sb, final String prefix, final ThrowableProxy cause, final List<String> ignorePackages) {
+ formatThrowableProxy(sb, prefix, CAUSED_BY_LABEL, cause, ignorePackages);
+ }
+
+ private void formatThrowableProxy(final StringBuilder sb, final String prefix, final String causeLabel,
+ final ThrowableProxy throwableProxy, final List<String> ignorePackages) {
+ if (throwableProxy == null) {
+ return;
+ }
+ sb.append(prefix).append(causeLabel).append(throwableProxy).append(EOL);
+ this.formatElements(sb, prefix, throwableProxy.commonElementCount,
+ throwableProxy.getStackTrace(), throwableProxy.extendedStackTrace, ignorePackages);
+ this.formatSuppressed(sb, prefix + "\t", throwableProxy.suppressedProxies, ignorePackages);
+ this.formatCause(sb, prefix, throwableProxy.causeProxy, ignorePackages);
+ }
+
+ private void formatSuppressed(final StringBuilder sb, final String prefix, final ThrowableProxy[] suppressedProxies,
+ final List<String> ignorePackages) {
+ if (suppressedProxies == null) {
+ return;
+ }
+ for (final ThrowableProxy suppressedProxy : suppressedProxies) {
+ final ThrowableProxy cause = suppressedProxy;
+ formatThrowableProxy(sb, prefix, SUPPRESSED_LABEL, cause, ignorePackages);
+ }
+ }
+
+ private void formatElements(final StringBuilder sb, final String prefix, final int commonCount,
+ final StackTraceElement[] causedTrace, final ExtendedStackTraceElement[] extStackTrace,
+ final List<String> ignorePackages) {
+ if (ignorePackages == null || ignorePackages.isEmpty()) {
+ for (final ExtendedStackTraceElement element : extStackTrace) {
+ this.formatEntry(element, sb, prefix);
+ }
+ } else {
+ int count = 0;
+ for (int i = 0; i < extStackTrace.length; ++i) {
+ if (!this.ignoreElement(causedTrace[i], ignorePackages)) {
+ if (count > 0) {
+ appendSuppressedCount(sb, prefix, count);
+ count = 0;
+ }
+ this.formatEntry(extStackTrace[i], sb, prefix);
+ } else {
+ ++count;
+ }
+ }
+ if (count > 0) {
+ appendSuppressedCount(sb, prefix, count);
+ }
+ }
+ if (commonCount != 0) {
+ sb.append(prefix).append("\t... ").append(commonCount).append(" more").append(EOL);
+ }
+ }
+
+ private void appendSuppressedCount(final StringBuilder sb, final String prefix, final int count) {
+ sb.append(prefix);
+ if (count == 1) {
+ sb.append("\t....").append(EOL);
+ } else {
+ sb.append("\t... suppressed ").append(count).append(" lines").append(EOL);
+ }
+ }
+
+ private void formatEntry(final ExtendedStackTraceElement extStackTraceElement, final StringBuilder sb, final String prefix) {
+ sb.append(prefix);
+ sb.append("\tat ");
+ sb.append(extStackTraceElement);
+ sb.append(EOL);
+ }
+
+ /**
+ * Formats the specified Throwable.
+ *
+ * @param sb
+ * StringBuilder to contain the formatted Throwable.
+ * @param cause
+ * The Throwable to format.
+ */
+ public void formatWrapper(final StringBuilder sb, final ThrowableProxy cause) {
+ this.formatWrapper(sb, cause, null);
+ }
+
+ /**
+ * Formats the specified Throwable.
+ *
+ * @param sb
+ * StringBuilder to contain the formatted Throwable.
+ * @param cause
+ * The Throwable to format.
+ * @param packages
+ * The List of packages to be suppressed from the trace.
+ */
+ @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
+ public void formatWrapper(final StringBuilder sb, final ThrowableProxy cause, final List<String> packages) {
+ final Throwable caused = cause.getCauseProxy() != null ? cause.getCauseProxy().getThrowable() : null;
+ if (caused != null) {
+ this.formatWrapper(sb, cause.causeProxy);
+ sb.append(WRAPPED_BY_LABEL);
+ }
+ sb.append(cause).append(EOL);
+ this.formatElements(sb, "", cause.commonElementCount,
+ cause.getThrowable().getStackTrace(), cause.extendedStackTrace, packages);
+ }
+
+ public ThrowableProxy getCauseProxy() {
+ return this.causeProxy;
+ }
+
+ /**
+ * Format the Throwable that is the cause of this Throwable.
+ *
+ * @return The formatted Throwable that caused this Throwable.
+ */
+ public String getCauseStackTraceAsString() {
+ return this.getCauseStackTraceAsString(null);
+ }
+
+ /**
+ * Format the Throwable that is the cause of this Throwable.
+ *
+ * @param packages
+ * The List of packages to be suppressed from the trace.
+ * @return The formatted Throwable that caused this Throwable.
+ */
+ public String getCauseStackTraceAsString(final List<String> packages) {
+ final StringBuilder sb = new StringBuilder();
+ if (this.causeProxy != null) {
+ this.formatWrapper(sb, this.causeProxy);
+ sb.append(WRAPPED_BY_LABEL);
+ }
+ sb.append(this.toString());
+ sb.append(EOL);
+ this.formatElements(sb, "", 0, this.throwable.getStackTrace(), this.extendedStackTrace, packages);
+ return sb.toString();
+ }
+
+ /**
+ * Return the number of elements that are being omitted because they are common with the parent Throwable's stack
+ * trace.
+ *
+ * @return The number of elements omitted from the stack trace.
+ */
+ public int getCommonElementCount() {
+ return this.commonElementCount;
+ }
+
+ /**
+ * Gets the stack trace including packaging information.
+ *
+ * @return The stack trace including packaging information.
+ */
+ public ExtendedStackTraceElement[] getExtendedStackTrace() {
+ return this.extendedStackTrace;
+ }
+
+ /**
+ * Format the stack trace including packaging information.
+ *
+ * @return The formatted stack trace including packaging information.
+ */
+ public String getExtendedStackTraceAsString() {
+ return this.getExtendedStackTraceAsString(null);
+ }
+
+ /**
+ * Format the stack trace including packaging information.
+ *
+ * @param ignorePackages
+ * List of packages to be ignored in the trace.
+ * @return The formatted stack trace including packaging information.
+ */
+ public String getExtendedStackTraceAsString(final List<String> ignorePackages) {
+ final StringBuilder sb = new StringBuilder(this.name);
+ final String msg = this.message;
+ if (msg != null) {
+ sb.append(": ").append(msg);
+ }
+ sb.append(EOL);
+ final StackTraceElement[] causedTrace = this.throwable != null ? this.throwable.getStackTrace() : null;
+ this.formatElements(sb, "", 0, causedTrace, this.extendedStackTrace, ignorePackages);
+ this.formatSuppressed(sb, "\t", this.suppressedProxies, ignorePackages);
+ this.formatCause(sb, "", this.causeProxy, ignorePackages);
+ return sb.toString();
+ }
+
+ public String getLocalizedMessage() {
+ return this.localizedMessage;
+ }
+
+ public String getMessage() {
+ return this.message;
+ }
+
+ /**
+ * Return the FQCN of the Throwable.
+ *
+ * @return The FQCN of the Throwable.
+ */
+ public String getName() {
+ return this.name;
+ }
+
+ public StackTraceElement[] getStackTrace() {
+ return this.throwable == null ? null : this.throwable.getStackTrace();
+ }
+
+ /**
+ * Gets proxies for suppressed exceptions.
+ *
+ * @return proxies for suppressed exceptions.
+ */
+ public ThrowableProxy[] getSuppressedProxies() {
+ return this.suppressedProxies;
+ }
+
+ /**
+ * Format the suppressed Throwables.
+ *
+ * @return The formatted suppressed Throwables.
+ */
+ public String getSuppressedStackTrace() {
+ final ThrowableProxy[] suppressed = this.getSuppressedProxies();
+ if (suppressed == null || suppressed.length == 0) {
+ return Strings.EMPTY;
+ }
+ final StringBuilder sb = new StringBuilder("Suppressed Stack Trace Elements:").append(EOL);
+ for (final ThrowableProxy proxy : suppressed) {
+ sb.append(proxy.getExtendedStackTraceAsString());
+ }
+ return sb.toString();
+ }
+
+ /**
+ * The throwable or null if this object is deserialized from XML or JSON.
+ *
+ * @return The throwable or null if this object is deserialized from XML or JSON.
+ */
+ public Throwable getThrowable() {
+ return this.throwable;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + (this.causeProxy == null ? 0 : this.causeProxy.hashCode());
+ result = prime * result + this.commonElementCount;
+ result = prime * result + (this.extendedStackTrace == null ? 0 : Arrays.hashCode(this.extendedStackTrace));
+ result = prime * result + (this.suppressedProxies == null ? 0 : Arrays.hashCode(this.suppressedProxies));
+ result = prime * result + (this.name == null ? 0 : this.name.hashCode());
+ return result;
+ }
+
+ private boolean ignoreElement(final StackTraceElement element, final List<String> ignorePackages) {
+ final String className = element.getClassName();
+ for (final String pkg : ignorePackages) {
+ if (className.startsWith(pkg)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Loads classes not located via Reflection.getCallerClass.
+ *
+ * @param lastLoader
+ * The ClassLoader that loaded the Class that called this Class.
+ * @param className
+ * The name of the Class.
+ * @return The Class object for the Class or null if it could not be located.
+ */
+ private Class<?> loadClass(final ClassLoader lastLoader, final String className) {
+ // XXX: this is overly complicated
+ Class<?> clazz;
+ if (lastLoader != null) {
+ try {
+ clazz = Loader.initializeClass(className, lastLoader);
+ if (clazz != null) {
+ return clazz;
+ }
+ } catch (final Throwable ignore) {
+ // Ignore exception.
+ }
+ }
+ try {
+ clazz = Loader.loadClass(className);
+ } catch (final ClassNotFoundException ignored) {
+ return initializeClass(className);
+ } catch (final NoClassDefFoundError ignored) {
+ return initializeClass(className);
+ } catch (final SecurityException ignored) {
+ return initializeClass(className);
+ }
+ return clazz;
+ }
+
+ private Class<?> initializeClass(final String className) {
+ try {
+ return Loader.initializeClass(className, this.getClass().getClassLoader());
+ } catch (final ClassNotFoundException ignore) {
+ return null;
+ } catch (final NoClassDefFoundError ignore) {
+ return null;
+ } catch (final SecurityException ignore) {
+ return null;
+ }
+ }
+
+ /**
+ * Construct the CacheEntry from the Class's information.
+ *
+ * @param stackTraceElement
+ * The stack trace element
+ * @param callerClass
+ * The Class.
+ * @param exact
+ * True if the class was obtained via Reflection.getCallerClass.
+ *
+ * @return The CacheEntry.
+ */
+ private CacheEntry toCacheEntry(final StackTraceElement stackTraceElement, final Class<?> callerClass,
+ final boolean exact) {
+ String location = "?";
+ String version = "?";
+ ClassLoader lastLoader = null;
+ if (callerClass != null) {
+ try {
+ final CodeSource source = callerClass.getProtectionDomain().getCodeSource();
+ if (source != null) {
+ final URL locationURL = source.getLocation();
+ if (locationURL != null) {
+ final String str = locationURL.toString().replace('\\', '/');
+ int index = str.lastIndexOf("/");
+ if (index >= 0 && index == str.length() - 1) {
+ index = str.lastIndexOf("/", index - 1);
+ location = str.substring(index + 1);
+ } else {
+ location = str.substring(index + 1);
+ }
+ }
+ }
+ } catch (final Exception ex) {
+ // Ignore the exception.
+ }
+ final Package pkg = callerClass.getPackage();
+ if (pkg != null) {
+ final String ver = pkg.getImplementationVersion();
+ if (ver != null) {
+ version = ver;
+ }
+ }
+ lastLoader = callerClass.getClassLoader();
+ }
+ return new CacheEntry(new ExtendedClassInfo(exact, location, version), lastLoader);
+ }
+
+ /**
+ * Resolve all the stack entries in this stack trace that are not common with the parent.
+ *
+ * @param stack
+ * The callers Class stack.
+ * @param map
+ * The cache of CacheEntry objects.
+ * @param rootTrace
+ * The first stack trace resolve or null.
+ * @param stackTrace
+ * The stack trace being resolved.
+ * @return The StackTracePackageElement array.
+ */
+ ExtendedStackTraceElement[] toExtendedStackTrace(final Stack<Class<?>> stack, final Map<String, CacheEntry> map,
+ final StackTraceElement[] rootTrace, final StackTraceElement[] stackTrace) {
+ int stackLength;
+ if (rootTrace != null) {
+ int rootIndex = rootTrace.length - 1;
+ int stackIndex = stackTrace.length - 1;
+ while (rootIndex >= 0 && stackIndex >= 0 && rootTrace[rootIndex].equals(stackTrace[stackIndex])) {
+ --rootIndex;
+ --stackIndex;
+ }
+ this.commonElementCount = stackTrace.length - 1 - stackIndex;
+ stackLength = stackIndex + 1;
+ } else {
+ this.commonElementCount = 0;
+ stackLength = stackTrace.length;
+ }
+ final ExtendedStackTraceElement[] extStackTrace = new ExtendedStackTraceElement[stackLength];
+ Class<?> clazz = stack.isEmpty() ? null : stack.peek();
+ ClassLoader lastLoader = null;
+ for (int i = stackLength - 1; i >= 0; --i) {
+ final StackTraceElement stackTraceElement = stackTrace[i];
+ final String className = stackTraceElement.getClassName();
+ // The stack returned from getCurrentStack may be missing entries for java.lang.reflect.Method.invoke()
+ // and its implementation. The Throwable might also contain stack entries that are no longer
+ // present as those methods have returned.
+ ExtendedClassInfo extClassInfo;
+ if (clazz != null && className.equals(clazz.getName())) {
+ final CacheEntry entry = this.toCacheEntry(stackTraceElement, clazz, true);
+ extClassInfo = entry.element;
+ lastLoader = entry.loader;
+ stack.pop();
+ clazz = stack.isEmpty() ? null : stack.peek();
+ } else {
+ final CacheEntry cacheEntry = map.get(className);
+ if (cacheEntry != null) {
+ final CacheEntry entry = cacheEntry;
+ extClassInfo = entry.element;
+ if (entry.loader != null) {
+ lastLoader = entry.loader;
+ }
+ } else {
+ final CacheEntry entry = this.toCacheEntry(stackTraceElement,
+ this.loadClass(lastLoader, className), false);
+ extClassInfo = entry.element;
+ map.put(stackTraceElement.toString(), entry);
+ if (entry.loader != null) {
+ lastLoader = entry.loader;
+ }
+ }
+ }
+ extStackTrace[i] = new ExtendedStackTraceElement(stackTraceElement, extClassInfo);
+ }
+ return extStackTrace;
+ }
+
+ @Override
+ public String toString() {
+ final String msg = this.message;
+ return msg != null ? this.name + ": " + msg : this.name;
+ }
+
+ private ThrowableProxy[] toSuppressedProxies(final Throwable thrown, Set<Throwable> suppressedVisited) {
+ try {
+ final Throwable[] suppressed = thrown.getSuppressed();
+ if (suppressed == null) {
+ return EMPTY_THROWABLE_PROXY_ARRAY;
+ }
+ final List<ThrowableProxy> proxies = new ArrayList<>(suppressed.length);
+ if (suppressedVisited == null) {
+ suppressedVisited = new HashSet<>(proxies.size());
+ }
+ for (int i = 0; i < suppressed.length; i++) {
+ final Throwable candidate = suppressed[i];
+ if (!suppressedVisited.contains(candidate)) {
+ suppressedVisited.add(candidate);
+ proxies.add(new ThrowableProxy(candidate, suppressedVisited));
+ }
+ }
+ return proxies.toArray(new ThrowableProxy[proxies.size()]);
+ } catch (final Exception e) {
+ StatusLogger.getLogger().error(e);
+ }
+ return null;
+ }
+}
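
For orientation, a minimal usage sketch of the vendored class (the demo class below is hypothetical; the constructor and method are those defined in the source above):

```java
import org.apache.logging.log4j.core.impl.ThrowableProxy;

public class ThrowableProxyDemo {
    public static void main(String[] args) {
        Exception cause = new NullPointerException("cause");
        Exception e = new IllegalStateException("boom", cause);
        ThrowableProxy proxy = new ThrowableProxy(e);
        // Renders each frame with packaging data, e.g. "at Foo.bar(Foo.java:1) [app.jar:1.0]",
        // then walks the cause chain with "Caused by: " labels.
        System.out.println(proxy.getExtendedStackTraceAsString());
    }
}
```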
diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
index 9bbe08208d7..bcf0a2b201a 100644
--- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
+++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java
@@ -102,7 +102,6 @@ public class MapperQueryParser extends QueryParser {
setLowercaseExpandedTerms(settings.lowercaseExpandedTerms());
setPhraseSlop(settings.phraseSlop());
setDefaultOperator(settings.defaultOperator());
- setFuzzyMinSim(settings.fuzziness().asFloat());
setFuzzyPrefixLength(settings.fuzzyPrefixLength());
setLocale(settings.locale());
}
@@ -114,7 +113,7 @@ public class MapperQueryParser extends QueryParser {
@Override
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException {
if (fuzzySlop.image.length() == 1) {
- return getFuzzyQuery(qfield, termImage, Float.toString(fuzzyMinSim));
+ return getFuzzyQuery(qfield, termImage, Float.toString(settings.fuzziness().asDistance(termImage)));
}
return getFuzzyQuery(qfield, termImage, fuzzySlop.image.substring(1));
}
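
The removed `setFuzzyMinSim(settings.fuzziness().asFloat())` applied one float-valued similarity globally; the replacement computes an integer edit distance per term via `Fuzziness#asDistance(String)`. A self-contained sketch of what AUTO fuzziness resolves to, with thresholds per the documented behavior of `Fuzziness.AUTO` (the helper class itself is hypothetical):

```java
public class AutoFuzzinessSketch {
    // Edit distance chosen by AUTO fuzziness as a function of term length (sketch).
    static int autoDistance(String term) {
        final int len = term.codePointCount(0, term.length());
        if (len <= 2) {
            return 0; // 0..2 chars: exact match only
        } else if (len <= 5) {
            return 1; // 3..5 chars: one edit allowed
        } else {
            return 2; // longer terms: up to two edits
        }
    }

    public static void main(String[] args) {
        System.out.println(autoDistance("ab"));     // 0
        System.out.println(autoDistance("abcd"));   // 1
        System.out.println(autoDistance("abcdef")); // 2
    }
}
```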
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
index d7ce899792f..dd243cf6d88 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java
@@ -37,10 +37,6 @@ import org.elasticsearch.threadpool.ThreadPoolInfo;
import org.elasticsearch.transport.TransportInfo;
import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import static java.util.Collections.unmodifiableMap;
/**
* Node information (static, does not change over time).
@@ -85,8 +81,8 @@ public class NodeInfo extends BaseNodeResponse {
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Settings settings,
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
- @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins, @Nullable IngestInfo ingest,
- @Nullable ByteSizeValue totalIndexingBuffer) {
+ @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins,
+ @Nullable IngestInfo ingest, @Nullable ByteSizeValue totalIndexingBuffer) {
super(node);
this.version = version;
this.build = build;
@@ -205,31 +201,14 @@ public class NodeInfo extends BaseNodeResponse {
if (in.readBoolean()) {
settings = Settings.readSettingsFromStream(in);
}
- if (in.readBoolean()) {
- os = OsInfo.readOsInfo(in);
- }
- if (in.readBoolean()) {
- process = ProcessInfo.readProcessInfo(in);
- }
- if (in.readBoolean()) {
- jvm = JvmInfo.readJvmInfo(in);
- }
- if (in.readBoolean()) {
- threadPool = ThreadPoolInfo.readThreadPoolInfo(in);
- }
- if (in.readBoolean()) {
- transport = TransportInfo.readTransportInfo(in);
- }
- if (in.readBoolean()) {
- http = HttpInfo.readHttpInfo(in);
- }
- if (in.readBoolean()) {
- plugins = new PluginsAndModules();
- plugins.readFrom(in);
- }
- if (in.readBoolean()) {
- ingest = new IngestInfo(in);
- }
+ os = in.readOptionalWriteable(OsInfo::new);
+ process = in.readOptionalWriteable(ProcessInfo::new);
+ jvm = in.readOptionalWriteable(JvmInfo::new);
+ threadPool = in.readOptionalWriteable(ThreadPoolInfo::new);
+ transport = in.readOptionalWriteable(TransportInfo::new);
+ http = in.readOptionalWriteable(HttpInfo::new);
+ plugins = in.readOptionalWriteable(PluginsAndModules::new);
+ ingest = in.readOptionalWriteable(IngestInfo::new);
}
@Override
@@ -249,53 +228,13 @@ public class NodeInfo extends BaseNodeResponse {
out.writeBoolean(true);
Settings.writeSettingsToStream(settings, out);
}
- if (os == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- os.writeTo(out);
- }
- if (process == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- process.writeTo(out);
- }
- if (jvm == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- jvm.writeTo(out);
- }
- if (threadPool == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- threadPool.writeTo(out);
- }
- if (transport == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- transport.writeTo(out);
- }
- if (http == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- http.writeTo(out);
- }
- if (plugins == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- plugins.writeTo(out);
- }
- if (ingest == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- ingest.writeTo(out);
- }
+ out.writeOptionalWriteable(os);
+ out.writeOptionalWriteable(process);
+ out.writeOptionalWriteable(jvm);
+ out.writeOptionalWriteable(threadPool);
+ out.writeOptionalWriteable(transport);
+ out.writeOptionalWriteable(http);
+ out.writeOptionalWriteable(plugins);
+ out.writeOptionalWriteable(ingest);
}
}
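
The eight hand-rolled boolean-prefixed blocks collapse into the optional-writeable helpers. A self-contained sketch of the wire pattern they implement, using plain java.io types (the real StreamInput/StreamOutput methods carry the same presence flag followed by the payload):

```java
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Minimal sketch of the optional-writeable wire pattern: a presence boolean, then the payload.
interface Writeable {
    void writeTo(DataOutputStream out) throws IOException;
}

final class OptionalWire {
    static <T extends Writeable> void writeOptional(DataOutputStream out, T value) throws IOException {
        out.writeBoolean(value != null); // presence flag, exactly as in the removed code
        if (value != null) {
            value.writeTo(out);          // payload only when present
        }
    }

    interface Reader<T> {
        T read(DataInputStream in) throws IOException;
    }

    static <T> T readOptional(DataInputStream in, Reader<T> reader) throws IOException {
        return in.readBoolean() ? reader.read(in) : null;
    }
}
```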
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java
index 3831fd24f3e..206dd262ed8 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/PluginsAndModules.java
@@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.node.info;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.PluginInfo;
@@ -34,13 +34,24 @@ import java.util.List;
/**
* Information about plugins and modules
*/
-public class PluginsAndModules implements Streamable, ToXContent {
- private List<PluginInfo> plugins;
- private List<PluginInfo> modules;
+public class PluginsAndModules implements Writeable, ToXContent {
+ private final List<PluginInfo> plugins;
+ private final List<PluginInfo> modules;
- public PluginsAndModules() {
- plugins = new ArrayList<>();
- modules = new ArrayList<>();
+ public PluginsAndModules(List<PluginInfo> plugins, List<PluginInfo> modules) {
+ this.plugins = Collections.unmodifiableList(plugins);
+ this.modules = Collections.unmodifiableList(modules);
+ }
+
+ public PluginsAndModules(StreamInput in) throws IOException {
+ this.plugins = Collections.unmodifiableList(in.readList(PluginInfo::new));
+ this.modules = Collections.unmodifiableList(in.readList(PluginInfo::new));
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeList(plugins);
+ out.writeList(modules);
}
/**
@@ -69,33 +80,6 @@ public class PluginsAndModules implements Streamable, ToXContent {
modules.add(info);
}
- @Override
- public void readFrom(StreamInput in) throws IOException {
- if (plugins.isEmpty() == false || modules.isEmpty() == false) {
- throw new IllegalStateException("instance is already populated");
- }
- int plugins_size = in.readInt();
- for (int i = 0; i < plugins_size; i++) {
- plugins.add(PluginInfo.readFromStream(in));
- }
- int modules_size = in.readInt();
- for (int i = 0; i < modules_size; i++) {
- modules.add(PluginInfo.readFromStream(in));
- }
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeInt(plugins.size());
- for (PluginInfo plugin : getPluginInfos()) {
- plugin.writeTo(out);
- }
- out.writeInt(modules.size());
- for (PluginInfo module : getModuleInfos()) {
- module.writeTo(out);
- }
- }
-
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startArray("plugins");
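
More broadly, this file follows the Streamable-to-Writeable migration: deserialization moves out of a mutating `readFrom(StreamInput)` into a constructor taking the input stream, which lets the fields become `final` and the instance immutable. A sketch of the idiom with plain java.io types (the class below is hypothetical):

```java
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

final class NamesOnTheWire {
    private final List<String> names;

    NamesOnTheWire(List<String> names) {
        this.names = Collections.unmodifiableList(names);
    }

    NamesOnTheWire(DataInputStream in) throws IOException { // "Writeable" read side: a constructor
        int size = in.readInt();
        List<String> read = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            read.add(in.readUTF());
        }
        this.names = Collections.unmodifiableList(read);
    }

    void writeTo(DataOutputStream out) throws IOException { // write side mirrors the reader exactly
        out.writeInt(names.size());
        for (String name : names) {
            out.writeUTF(name);
        }
    }
}
```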
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java
index f8cf62164e9..c4cd5d908af 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java
@@ -86,7 +86,7 @@ public class JarHell {
}
checkJarHell(parseClassPath());
}
-
+
/**
* Parses the classpath into an array of URLs
* @return array of URLs
@@ -277,6 +277,14 @@ public class JarHell {
if (clazz.equals("org.joda.time.base.BaseDateTime")) {
return; // apparently this is intentional... clean this up
}
+ if (clazz.startsWith("org.apache.logging.log4j.core.impl.ThrowableProxy")) {
+ /*
+ * deliberate to hack around a bug in Log4j
+ * cf. https://github.com/elastic/elasticsearch/issues/20304
+ * cf. https://issues.apache.org/jira/browse/LOG4J2-1560
+ */
+ return;
+ }
throw new IllegalStateException("jar hell!" + System.lineSeparator() +
"class: " + clazz + System.lineSeparator() +
"jar1: " + previous + System.lineSeparator() +
diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
index 3b7ec0deb34..68e2886cf37 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
@@ -30,6 +30,7 @@ import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration;
import org.apache.logging.log4j.core.config.composite.CompositeConfiguration;
import org.apache.logging.log4j.core.config.properties.PropertiesConfiguration;
import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationFactory;
+import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
@@ -43,6 +44,7 @@ import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
@@ -63,9 +65,9 @@ public class LogConfigurator {
final LoggerContext context = (LoggerContext) LogManager.getContext(false);
if (resolveConfig) {
- final Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
final List<AbstractConfiguration> configurations = new ArrayList<>();
final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory();
+ final Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
Files.walkFileTree(environment.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
@@ -76,6 +78,7 @@ public class LogConfigurator {
}
});
context.start(new CompositeConfiguration(configurations));
+ warnIfOldConfigurationFilePresent(environment);
}
if (ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) {
@@ -89,8 +92,32 @@ public class LogConfigurator {
}
}
+ private static void warnIfOldConfigurationFilePresent(final Environment environment) throws IOException {
+ // TODO: the warning for unsupported logging configurations can be removed in 6.0.0
+ assert Version.CURRENT.major < 6;
+ final List<String> suffixes = Arrays.asList(".yml", ".yaml", ".json", ".properties");
+ final Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
+ Files.walkFileTree(environment.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
+ @Override
+ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+ final String fileName = file.getFileName().toString();
+ if (fileName.startsWith("logging")) {
+ for (final String suffix : suffixes) {
+ if (fileName.endsWith(suffix)) {
+ Loggers.getLogger(LogConfigurator.class).warn(
+ "ignoring unsupported logging configuration file [{}], logging is configured via [{}]",
+ file.toString(),
+ file.getParent().resolve("log4j2.properties"));
+ }
+ }
+ }
+ return FileVisitResult.CONTINUE;
+ }
+ });
+ }
+
@SuppressForbidden(reason = "sets system property for logging configuration")
- private static void setLogConfigurationSystemProperty(Environment environment, Settings settings) {
+ private static void setLogConfigurationSystemProperty(final Environment environment, final Settings settings) {
System.setProperty("es.logs", environment.logsFile().resolve(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()).toString());
}
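
A small sketch of the matching rule the new visitor applies (hypothetical helper): any file named `logging*` with one of the legacy suffixes triggers the warning, while `log4j2.properties` itself never matches because it does not start with `logging`:

```java
import java.util.Arrays;
import java.util.List;

public class LegacyLoggingConfigMatcher {
    private static final List<String> SUFFIXES = Arrays.asList(".yml", ".yaml", ".json", ".properties");

    // Mirrors the check in warnIfOldConfigurationFilePresent above (sketch).
    static boolean isLegacyLoggingConfig(String fileName) {
        if (!fileName.startsWith("logging")) {
            return false;
        }
        for (String suffix : SUFFIXES) {
            if (fileName.endsWith(suffix)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(isLegacyLoggingConfig("logging.yml"));       // true -> warn
        System.out.println(isLegacyLoggingConfig("log4j2.properties")); // false -> supported config
    }
}
```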
diff --git a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java
index 32df65850a8..5944a8e06e6 100644
--- a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java
+++ b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java
@@ -23,20 +23,25 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
-public class ByteSizeValue implements Streamable {
+public class ByteSizeValue implements Writeable {
- private long size;
+ private final long size;
+ private final ByteSizeUnit sizeUnit;
- private ByteSizeUnit sizeUnit;
-
- private ByteSizeValue() {
+ public ByteSizeValue(StreamInput in) throws IOException {
+ size = in.readVLong();
+ sizeUnit = ByteSizeUnit.BYTES;
+ }
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVLong(bytes());
}
public ByteSizeValue(long bytes) {
@@ -172,7 +177,8 @@ public class ByteSizeValue implements Streamable {
return parseBytesSizeValue(sValue, null, settingName);
}
- public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName) throws ElasticsearchParseException {
+ public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName)
+ throws ElasticsearchParseException {
settingName = Objects.requireNonNull(settingName);
if (sValue == null) {
return defaultValue;
@@ -210,7 +216,9 @@ public class ByteSizeValue implements Streamable {
bytes = 0;
} else {
// Missing units:
- throw new ElasticsearchParseException("failed to parse setting [{}] with value [{}] as a size in bytes: unit is missing or unrecognized", settingName, sValue);
+ throw new ElasticsearchParseException(
+ "failed to parse setting [{}] with value [{}] as a size in bytes: unit is missing or unrecognized",
+ settingName, sValue);
}
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("failed to parse [{}]", e, sValue);
@@ -218,23 +226,6 @@ public class ByteSizeValue implements Streamable {
return new ByteSizeValue(bytes, ByteSizeUnit.BYTES);
}
- public static ByteSizeValue readBytesSizeValue(StreamInput in) throws IOException {
- ByteSizeValue sizeValue = new ByteSizeValue();
- sizeValue.readFrom(in);
- return sizeValue;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- size = in.readVLong();
- sizeUnit = ByteSizeUnit.BYTES;
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeVLong(bytes());
- }
-
@Override
public boolean equals(Object o) {
if (this == o) {
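
One behavior worth noting survives this refactor: the wire format carries only the byte count, and the reading side pins the unit to `BYTES`, so units are normalized across the wire. A hypothetical round-trip (`BytesStreamOutput` is the usual in-memory stream in Elasticsearch):

```java
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

public class ByteSizeValueWireDemo {
    public static void main(String[] args) throws Exception {
        ByteSizeValue original = new ByteSizeValue(1, ByteSizeUnit.KB);
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);                      // writes 1024 as a vlong
            StreamInput in = out.bytes().streamInput();
            ByteSizeValue copy = new ByteSizeValue(in); // reads back as 1024 with unit BYTES
            assert copy.bytes() == original.bytes();    // value preserved, unit flattened
        }
    }
}
```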
diff --git a/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java
index e04dfe51430..cba51f29eeb 100644
--- a/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java
+++ b/core/src/main/java/org/elasticsearch/common/unit/SizeValue.java
@@ -23,22 +23,14 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
-/**
- *
- */
-public class SizeValue implements Streamable {
+public class SizeValue implements Writeable {
- private long size;
-
- private SizeUnit sizeUnit;
-
- private SizeValue() {
-
- }
+ private final long size;
+ private final SizeUnit sizeUnit;
public SizeValue(long singles) {
this(singles, SizeUnit.SINGLE);
@@ -52,6 +44,16 @@ public class SizeValue implements Streamable {
this.sizeUnit = sizeUnit;
}
+ public SizeValue(StreamInput in) throws IOException {
+ size = in.readVLong();
+ sizeUnit = SizeUnit.SINGLE;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVLong(singles());
+ }
+
public long singles() {
return sizeUnit.toSingles(size);
}
@@ -194,23 +196,6 @@ public class SizeValue implements Streamable {
return new SizeValue(singles, SizeUnit.SINGLE);
}
- public static SizeValue readSizeValue(StreamInput in) throws IOException {
- SizeValue sizeValue = new SizeValue();
- sizeValue.readFrom(in);
- return sizeValue;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- size = in.readVLong();
- sizeUnit = SizeUnit.SINGLE;
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeVLong(singles());
- }
-
@Override
public boolean equals(Object o) {
if (this == o) return true;
diff --git a/core/src/main/java/org/elasticsearch/http/HttpInfo.java b/core/src/main/java/org/elasticsearch/http/HttpInfo.java
index 0f285974e8a..e8f3985a23a 100644
--- a/core/src/main/java/org/elasticsearch/http/HttpInfo.java
+++ b/core/src/main/java/org/elasticsearch/http/HttpInfo.java
@@ -21,7 +21,7 @@ package org.elasticsearch.http;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -29,15 +29,20 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
-/**
- *
- */
-public class HttpInfo implements Streamable, ToXContent {
+public class HttpInfo implements Writeable, ToXContent {
- private BoundTransportAddress address;
- private long maxContentLength;
+ private final BoundTransportAddress address;
+ private final long maxContentLength;
- HttpInfo() {
+ public HttpInfo(StreamInput in) throws IOException {
+ address = BoundTransportAddress.readBoundTransportAddress(in);
+ maxContentLength = in.readLong();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ address.writeTo(out);
+ out.writeLong(maxContentLength);
}
public HttpInfo(BoundTransportAddress address, long maxContentLength) {
@@ -63,24 +68,6 @@ public class HttpInfo implements Streamable, ToXContent {
return builder;
}
- public static HttpInfo readHttpInfo(StreamInput in) throws IOException {
- HttpInfo info = new HttpInfo();
- info.readFrom(in);
- return info;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- address = BoundTransportAddress.readBoundTransportAddress(in);
- maxContentLength = in.readLong();
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- address.writeTo(out);
- out.writeLong(maxContentLength);
- }
-
public BoundTransportAddress address() {
return address;
}
diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
index a6b5583a0e2..9f9d2186a83 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java
@@ -25,6 +25,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.apache.lucene.search.QueryCache;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.similarities.Similarity;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -65,6 +66,7 @@ public final class EngineConfig {
private final Engine.EventListener eventListener;
private final QueryCache queryCache;
private final QueryCachingPolicy queryCachingPolicy;
+ private final long maxUnsafeAutoIdTimestamp;
@Nullable
private final RefreshListeners refreshListeners;
@@ -107,10 +109,11 @@ public final class EngineConfig {
*/
public EngineConfig(OpenMode openMode, ShardId shardId, ThreadPool threadPool,
IndexSettings indexSettings, Engine.Warmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy,
- MergePolicy mergePolicy,Analyzer analyzer,
+ MergePolicy mergePolicy, Analyzer analyzer,
Similarity similarity, CodecService codecService, Engine.EventListener eventListener,
TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy,
- TranslogConfig translogConfig, TimeValue flushMergesAfter, RefreshListeners refreshListeners) {
+ TranslogConfig translogConfig, TimeValue flushMergesAfter, RefreshListeners refreshListeners,
+ long maxUnsafeAutoIdTimestamp) {
if (openMode == null) {
throw new IllegalArgumentException("openMode must not be null");
}
@@ -137,6 +140,9 @@ public final class EngineConfig {
this.flushMergesAfter = flushMergesAfter;
this.openMode = openMode;
this.refreshListeners = refreshListeners;
+ assert maxUnsafeAutoIdTimestamp >= IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP :
+ "maxUnsafeAutoIdTimestamp must be >= -1 but was " + maxUnsafeAutoIdTimestamp;
+ this.maxUnsafeAutoIdTimestamp = maxUnsafeAutoIdTimestamp;
}
/**
@@ -323,10 +329,10 @@ public final class EngineConfig {
}
/**
- * Returns <code>true</code> iff auto generated IDs should be optimized inside the engine for append only.
- * The default is <code>true</code>.
+ * Returns the max timestamp that is used to de-optimize documents with auto-generated IDs in the engine.
+ * This is used to ensure we don't add duplicate documents when we assume an append only case based on auto-generated IDs
*/
- public boolean getOptimizeAutoGeneratedIds() {
- return indexSettings.getValue(INDEX_OPTIMIZE_AUTO_GENERATED_IDS);
+ public long getMaxUnsafeAutoIdTimestamp() {
+ return indexSettings.getValue(INDEX_OPTIMIZE_AUTO_GENERATED_IDS) ? maxUnsafeAutoIdTimestamp : Long.MAX_VALUE;
}
}
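
A sketch of the sentinel semantics threaded through this change (the -1 value is confirmed by the assert above on `IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP`; the helper class is hypothetical):

```java
public class AutoIdTimestampSemantics {
    // Sentinel per the assert above: "unset" is -1.
    static final long UNSET_AUTO_GENERATED_TIMESTAMP = -1L;

    // Sketch of getMaxUnsafeAutoIdTimestamp(): opting out of the optimization is expressed
    // as "every auto-generated-ID timestamp is unsafe", i.e. Long.MAX_VALUE.
    static long effectiveMaxUnsafeAutoIdTimestamp(boolean optimizeAutoGeneratedIds, long recorded) {
        return optimizeAutoGeneratedIds ? recorded : Long.MAX_VALUE;
    }

    public static void main(String[] args) {
        System.out.println(effectiveMaxUnsafeAutoIdTimestamp(true, UNSET_AUTO_GENERATED_TIMESTAMP)); // -1: nothing unsafe yet
        System.out.println(effectiveMaxUnsafeAutoIdTimestamp(false, 42L)); // MAX_VALUE: optimization disabled
    }
}
```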
diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 45311fef518..774465fb71a 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -127,10 +127,11 @@ public class InternalEngine extends Engine {
public InternalEngine(EngineConfig engineConfig) throws EngineException {
super(engineConfig);
openMode = engineConfig.getOpenMode();
- if (engineConfig.getOptimizeAutoGeneratedIds() == false
- || engineConfig.getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha6)) {
+ if (engineConfig.getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha6)) {
// no optimization for pre 5.0.0.alpha6 since translog might not have all information needed
maxUnsafeAutoIdTimestamp.set(Long.MAX_VALUE);
+ } else {
+ maxUnsafeAutoIdTimestamp.set(engineConfig.getMaxUnsafeAutoIdTimestamp());
}
this.versionMap = new LiveVersionMap();
store.incRef();
@@ -1299,7 +1300,7 @@ public class InternalEngine extends Engine {
mergeScheduler.refreshConfig();
// config().isEnableGcDeletes() or config.getGcDeletesInMillis() may have changed:
maybePruneDeletedTombstones();
- if (engineConfig.getOptimizeAutoGeneratedIds() == false) {
+ if (engineConfig.getMaxUnsafeAutoIdTimestamp() == Long.MAX_VALUE) {
// this is an anti-viral setting; you can only opt out for the entire index
// only if a shard starts up again due to relocation or if the index is closed
// the setting will be re-interpreted if it's set to true
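
A rough sketch, with illustrative names simplified from InternalEngine, of how the recorded timestamp gates the append-only optimization for auto-generated IDs:

```java
import java.util.concurrent.atomic.AtomicLong;

final class AppendOnlyGate {
    // Seeded from EngineConfig#getMaxUnsafeAutoIdTimestamp() at engine construction.
    private final AtomicLong maxUnsafeAutoIdTimestamp = new AtomicLong(-1);

    boolean canSkipDuplicateCheck(boolean autoGeneratedId, boolean isRetry, long timestamp) {
        if (autoGeneratedId == false) {
            return false; // regular ops always consult the version map
        }
        if (isRetry) {
            // a retry can race with its original request: remember the timestamp and de-optimize
            maxUnsafeAutoIdTimestamp.accumulateAndGet(timestamp, Math::max);
            return false;
        }
        // append-only indexing is safe iff nothing unsafe with an equal-or-newer timestamp was seen
        return timestamp > maxUnsafeAutoIdTimestamp.get();
    }
}
```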
diff --git a/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java b/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java
index 7a4bf8155b3..637beebfec8 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java
@@ -279,7 +279,7 @@ public class SegmentsStats implements Streamable, ToXContent {
}
/**
- * Returns the max timestamp that is used to de-optimize documetns with auto-generated IDs in the engine.
+ * Returns the max timestamp that is used to de-optimize documents with auto-generated IDs in the engine.
* This is used to ensure we don't add duplicate documents when we assume an append only case based on auto-generated IDs
*/
public long getMaxUnsafeAutoIdTimestamp() {
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index 45b8104cb75..a38585cf468 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -42,6 +42,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.RecoverySource;
@@ -959,11 +960,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
translogStats.totalOperations(0);
translogStats.totalOperationsOnStart(0);
}
- internalPerformTranslogRecovery(false, indexExists);
+ internalPerformTranslogRecovery(false, indexExists, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
assert recoveryState.getStage() == RecoveryState.Stage.TRANSLOG : "TRANSLOG stage expected but was: " + recoveryState.getStage();
}
- private void internalPerformTranslogRecovery(boolean skipTranslogRecovery, boolean indexExists) throws IOException {
+ private void internalPerformTranslogRecovery(boolean skipTranslogRecovery, boolean indexExists, long maxUnsafeAutoIdTimestamp) throws IOException {
if (state != IndexShardState.RECOVERING) {
throw new IndexShardNotRecoveringException(shardId, state);
}
@@ -992,7 +993,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
} else {
openMode = EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG;
}
- final EngineConfig config = newEngineConfig(openMode);
+ final EngineConfig config = newEngineConfig(openMode, maxUnsafeAutoIdTimestamp);
// we disable deletes since we allow for operations to be executed against the shard while recovering
// but we need to make sure we don't loose deletes until we are done recovering
config.setEnableGcDeletes(false);
@@ -1012,9 +1013,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
* the replay of the transaction log which is required in cases where we restore a previous index or recover from
* a remote peer.
*/
- public void skipTranslogRecovery() throws IOException {
+ public void skipTranslogRecovery(long maxUnsafeAutoIdTimestamp) throws IOException {
assert getEngineOrNull() == null : "engine was already created";
- internalPerformTranslogRecovery(true, true);
+ internalPerformTranslogRecovery(true, true, maxUnsafeAutoIdTimestamp);
assert recoveryState.getTranslog().recoveredOperations() == 0;
}
@@ -1603,12 +1604,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
return mapperService.documentMapperWithAutoCreate(type);
}
- private EngineConfig newEngineConfig(EngineConfig.OpenMode openMode) {
+ private EngineConfig newEngineConfig(EngineConfig.OpenMode openMode, long maxUnsafeAutoIdTimestamp) {
final IndexShardRecoveryPerformer translogRecoveryPerformer = new IndexShardRecoveryPerformer(shardId, mapperService, logger);
return new EngineConfig(openMode, shardId,
threadPool, indexSettings, warmer, store, deletionPolicy, indexSettings.getMergePolicy(),
mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig,
- IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), refreshListeners);
+ IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), refreshListeners,
+ maxUnsafeAutoIdTimestamp);
}
/**
diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
index 6ff897a8c89..44b4ed933f7 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
@@ -30,6 +30,7 @@ import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.RecoverySource;
@@ -346,7 +347,7 @@ final class StoreRecovery {
recoveryState.getIndex().updateVersion(version);
if (recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) {
assert indexShouldExists;
- indexShard.skipTranslogRecovery();
+ indexShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
} else {
// since we recover from local, just fill the files and size
try {
@@ -398,7 +399,7 @@ final class StoreRecovery {
}
final IndexId indexId = repository.getRepositoryData().resolveIndexId(indexName);
repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), indexId, snapshotShardId, indexShard.recoveryState());
- indexShard.skipTranslogRecovery();
+ indexShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
indexShard.finalizeRecovery();
indexShard.postRecovery("restore done");
} catch (Exception e) {
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
index 778e6c88fa8..f26d0787f41 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
@@ -304,7 +304,7 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde
public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, TransportChannel channel) throws Exception {
try (RecoveriesCollection.RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId()
)) {
- recoveryRef.status().prepareForTranslogOperations(request.totalTranslogOps());
+ recoveryRef.status().prepareForTranslogOperations(request.totalTranslogOps(), request.getMaxUnsafeAutoIdTimestamp());
}
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java
index 69a55e03c9a..d2e07bd9e4c 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverFilesRecoveryException.java
@@ -57,7 +57,7 @@ public class RecoverFilesRecoveryException extends ElasticsearchException implem
public RecoverFilesRecoveryException(StreamInput in) throws IOException{
super(in);
numberOfFiles = in.readInt();
- totalFilesSize = ByteSizeValue.readBytesSizeValue(in);
+ totalFilesSize = new ByteSizeValue(in);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java
index 3d5d7052c9d..171102d07ea 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.recovery;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;
@@ -31,6 +32,7 @@ import java.io.IOException;
*/
public class RecoveryPrepareForTranslogOperationsRequest extends TransportRequest {
+ private long maxUnsafeAutoIdTimestamp = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP;
private long recoveryId;
private ShardId shardId;
private int totalTranslogOps = RecoveryState.Translog.UNKNOWN;
@@ -38,10 +40,11 @@ public class RecoveryPrepareForTranslogOperationsRequest extends TransportReques
public RecoveryPrepareForTranslogOperationsRequest() {
}
- RecoveryPrepareForTranslogOperationsRequest(long recoveryId, ShardId shardId, int totalTranslogOps) {
+ RecoveryPrepareForTranslogOperationsRequest(long recoveryId, ShardId shardId, int totalTranslogOps, long maxUnsafeAutoIdTimestamp) {
this.recoveryId = recoveryId;
this.shardId = shardId;
this.totalTranslogOps = totalTranslogOps;
+ this.maxUnsafeAutoIdTimestamp = maxUnsafeAutoIdTimestamp;
}
public long recoveryId() {
@@ -56,12 +59,17 @@ public class RecoveryPrepareForTranslogOperationsRequest extends TransportReques
return totalTranslogOps;
}
+ public long getMaxUnsafeAutoIdTimestamp() {
+ return maxUnsafeAutoIdTimestamp;
+ }
+
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
recoveryId = in.readLong();
shardId = ShardId.readShardId(in);
totalTranslogOps = in.readVInt();
+ maxUnsafeAutoIdTimestamp = in.readLong();
}
@Override
@@ -70,5 +78,6 @@ public class RecoveryPrepareForTranslogOperationsRequest extends TransportReques
out.writeLong(recoveryId);
shardId.writeTo(out);
out.writeVInt(totalTranslogOps);
+ out.writeLong(maxUnsafeAutoIdTimestamp);
}
}
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
index cb3356358cd..790376ba78e 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
@@ -346,7 +346,8 @@ public class RecoverySourceHandler {
// Send a request preparing the new shard's translog to receive
// operations. This ensures the shard engine is started and disables
// garbage collection (not the JVM's GC!) of tombstone deletes
- cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(totalTranslogOps));
+ cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(totalTranslogOps,
+ shard.segmentStats(false).getMaxUnsafeAutoIdTimestamp()));
stopWatch.stop();
response.startTime = stopWatch.totalTime().millis() - startEngineStart;
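
For context, the timestamp shipped here feeds the engine's append-only optimization for auto-generated IDs: an indexing operation may skip the duplicate check only when its auto-ID timestamp is newer than anything the shard may already have seen. A loose sketch of that guard, illustrative only and not the actual InternalEngine code:

// Illustrative guard, assuming the engine tracks the shipped value in a field
// named maxUnsafeAutoIdTimestamp: optimize (skip the version lookup) only when
// no earlier operation could have written a duplicate with this timestamp.
static boolean mayOptimizeAppendOnly(long autoGeneratedIdTimestamp, long maxUnsafeAutoIdTimestamp) {
    return autoGeneratedIdTimestamp > maxUnsafeAutoIdTimestamp;
}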
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
index 9d833fe163e..d608dc50e23 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
@@ -327,9 +327,9 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget
/*** Implementation of {@link RecoveryTargetHandler } */
@Override
- public void prepareForTranslogOperations(int totalTranslogOps) throws IOException {
+ public void prepareForTranslogOperations(int totalTranslogOps, long maxUnsafeAutoIdTimestamp) throws IOException {
state().getTranslog().totalOperations(totalTranslogOps);
- indexShard().skipTranslogRecovery();
+ indexShard().skipTranslogRecovery(maxUnsafeAutoIdTimestamp);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java
index 3d7e4f29c35..18966028792 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java
@@ -33,8 +33,10 @@ public interface RecoveryTargetHandler {
* Prepares the target to receive translog operations, after all files have been copied
*
* @param totalTranslogOps total translog operations expected to be sent
+ * @param maxUnsafeAutoIdTimestamp the max timestamp that is used to de-optimize documents with auto-generated IDs in the engine.
+ * This is used to ensure we don't add duplicate documents when we assume an append-only case based on auto-generated IDs
*/
- void prepareForTranslogOperations(int totalTranslogOps) throws IOException;
+ void prepareForTranslogOperations(int totalTranslogOps, long maxUnsafeAutoIdTimestamp) throws IOException;
/**
* The finalize request clears unreferenced translog files, refreshes the engine now that
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java
index 428c1615880..327eb3b8eca 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java
@@ -74,9 +74,9 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler {
}
@Override
- public void prepareForTranslogOperations(int totalTranslogOps) throws IOException {
+ public void prepareForTranslogOperations(int totalTranslogOps, long maxUnsafeAutoIdTimestamp) throws IOException {
transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG,
- new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps),
+ new RecoveryPrepareForTranslogOperationsRequest(recoveryId, shardId, totalTranslogOps, maxUnsafeAutoIdTimestamp),
TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(),
EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java
index 1619ecee23a..ca0bb4f3e80 100644
--- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java
+++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java
@@ -19,10 +19,9 @@
package org.elasticsearch.monitor.jvm;
-import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -37,14 +36,10 @@ import java.lang.management.PlatformManagedObject;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
-/**
- *
- */
-public class JvmInfo implements Streamable, ToXContent {
+public class JvmInfo implements Writeable, ToXContent {
private static JvmInfo INSTANCE;
@@ -61,100 +56,106 @@ public class JvmInfo implements Streamable, ToXContent {
} catch (Exception e) {
pid = -1;
}
- JvmInfo info = new JvmInfo();
- info.pid = pid;
- info.startTime = runtimeMXBean.getStartTime();
- info.version = System.getProperty("java.version");
- info.vmName = runtimeMXBean.getVmName();
- info.vmVendor = runtimeMXBean.getVmVendor();
- info.vmVersion = runtimeMXBean.getVmVersion();
- info.mem = new Mem();
- info.mem.heapInit = memoryMXBean.getHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getInit();
- info.mem.heapMax = memoryMXBean.getHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getMax();
- info.mem.nonHeapInit = memoryMXBean.getNonHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getInit();
- info.mem.nonHeapMax = memoryMXBean.getNonHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getMax();
+
+ long heapInit = memoryMXBean.getHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getInit();
+ long heapMax = memoryMXBean.getHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getHeapMemoryUsage().getMax();
+ long nonHeapInit = memoryMXBean.getNonHeapMemoryUsage().getInit() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getInit();
+ long nonHeapMax = memoryMXBean.getNonHeapMemoryUsage().getMax() < 0 ? 0 : memoryMXBean.getNonHeapMemoryUsage().getMax();
+ long directMemoryMax = 0;
try {
Class<?> vmClass = Class.forName("sun.misc.VM");
- info.mem.directMemoryMax = (Long) vmClass.getMethod("maxDirectMemory").invoke(null);
+ directMemoryMax = (Long) vmClass.getMethod("maxDirectMemory").invoke(null);
} catch (Exception t) {
// ignore
}
- info.inputArguments = runtimeMXBean.getInputArguments().toArray(new String[runtimeMXBean.getInputArguments().size()]);
+ String[] inputArguments = runtimeMXBean.getInputArguments().toArray(new String[runtimeMXBean.getInputArguments().size()]);
+ Mem mem = new Mem(heapInit, heapMax, nonHeapInit, nonHeapMax, directMemoryMax);
+
+ String bootClassPath;
try {
- info.bootClassPath = runtimeMXBean.getBootClassPath();
+ bootClassPath = runtimeMXBean.getBootClassPath();
} catch (UnsupportedOperationException e) {
// oracle java 9
- info.bootClassPath = System.getProperty("sun.boot.class.path");
- if (info.bootClassPath == null) {
+ bootClassPath = System.getProperty("sun.boot.class.path");
+ if (bootClassPath == null) {
// something else
- info.bootClassPath = "";
+ bootClassPath = "";
}
}
- info.classPath = runtimeMXBean.getClassPath();
- info.systemProperties = Collections.unmodifiableMap(runtimeMXBean.getSystemProperties());
+ String classPath = runtimeMXBean.getClassPath();
+ Map<String, String> systemProperties = Collections.unmodifiableMap(runtimeMXBean.getSystemProperties());
List<GarbageCollectorMXBean> gcMxBeans = ManagementFactory.getGarbageCollectorMXBeans();
- info.gcCollectors = new String[gcMxBeans.size()];
+ String[] gcCollectors = new String[gcMxBeans.size()];
for (int i = 0; i < gcMxBeans.size(); i++) {
GarbageCollectorMXBean gcMxBean = gcMxBeans.get(i);
- info.gcCollectors[i] = gcMxBean.getName();
+ gcCollectors[i] = gcMxBean.getName();
}
List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
- info.memoryPools = new String[memoryPoolMXBeans.size()];
+ String[] memoryPools = new String[memoryPoolMXBeans.size()];
for (int i = 0; i < memoryPoolMXBeans.size(); i++) {
MemoryPoolMXBean memoryPoolMXBean = memoryPoolMXBeans.get(i);
- info.memoryPools[i] = memoryPoolMXBean.getName();
+ memoryPools[i] = memoryPoolMXBean.getName();
}
+ String onError = null;
+ String onOutOfMemoryError = null;
+ String useCompressedOops = "unknown";
+ String useG1GC = "unknown";
+ long configuredInitialHeapSize = -1;
+ long configuredMaxHeapSize = -1;
try {
@SuppressWarnings("unchecked") Class<? extends PlatformManagedObject> clazz =
- (Class<? extends PlatformManagedObject>)Class.forName("com.sun.management.HotSpotDiagnosticMXBean");
+ (Class<? extends PlatformManagedObject>)Class.forName("com.sun.management.HotSpotDiagnosticMXBean");
Class<?> vmOptionClazz = Class.forName("com.sun.management.VMOption");
PlatformManagedObject hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz);
Method vmOptionMethod = clazz.getMethod("getVMOption", String.class);
Method valueMethod = vmOptionClazz.getMethod("getValue");
try {
- Object onError = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnError");
- info.onError = (String) valueMethod.invoke(onError);
+ Object onErrorObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnError");
+ onError = (String) valueMethod.invoke(onErrorObject);
} catch (Exception ignored) {
}
try {
- Object onOutOfMemoryError = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnOutOfMemoryError");
- info.onOutOfMemoryError = (String) valueMethod.invoke(onOutOfMemoryError);
+ Object onOutOfMemoryErrorObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "OnOutOfMemoryError");
+ onOutOfMemoryError = (String) valueMethod.invoke(onOutOfMemoryErrorObject);
} catch (Exception ignored) {
}
try {
- Object useCompressedOopsVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops");
- info.useCompressedOops = (String) valueMethod.invoke(useCompressedOopsVmOption);
+ Object useCompressedOopsVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops");
+ useCompressedOops = (String) valueMethod.invoke(useCompressedOopsVmOptionObject);
} catch (Exception ignored) {
}
try {
- Object useG1GCVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseG1GC");
- info.useG1GC = (String) valueMethod.invoke(useG1GCVmOption);
+ Object useG1GCVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseG1GC");
+ useG1GC = (String) valueMethod.invoke(useG1GCVmOptionObject);
} catch (Exception ignored) {
}
try {
- Object initialHeapSizeVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "InitialHeapSize");
- info.configuredInitialHeapSize = Long.parseLong((String) valueMethod.invoke(initialHeapSizeVmOption));
+ Object initialHeapSizeVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "InitialHeapSize");
+ configuredInitialHeapSize = Long.parseLong((String) valueMethod.invoke(initialHeapSizeVmOptionObject));
} catch (Exception ignored) {
}
try {
- Object maxHeapSizeVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "MaxHeapSize");
- info.configuredMaxHeapSize = Long.parseLong((String) valueMethod.invoke(maxHeapSizeVmOption));
+ Object maxHeapSizeVmOptionObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "MaxHeapSize");
+ configuredMaxHeapSize = Long.parseLong((String) valueMethod.invoke(maxHeapSizeVmOptionObject));
} catch (Exception ignored) {
}
} catch (Exception ignored) {
}
- INSTANCE = info;
+ INSTANCE = new JvmInfo(pid, System.getProperty("java.version"), runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(),
+ runtimeMXBean.getVmVendor(), runtimeMXBean.getStartTime(), configuredInitialHeapSize, configuredMaxHeapSize,
+ mem, inputArguments, bootClassPath, classPath, systemProperties, gcCollectors, memoryPools, onError, onOutOfMemoryError,
+ useCompressedOops, useG1GC);
}
public static JvmInfo jvmInfo() {
@@ -166,40 +167,100 @@ public class JvmInfo implements Streamable, ToXContent {
return INSTANCE;
}
- long pid = -1;
+ private final long pid;
+ private final String version;
+ private final String vmName;
+ private final String vmVersion;
+ private final String vmVendor;
+ private final long startTime;
+ private final long configuredInitialHeapSize;
+ private final long configuredMaxHeapSize;
+ private final Mem mem;
+ private final String[] inputArguments;
+ private final String bootClassPath;
+ private final String classPath;
+ private final Map<String, String> systemProperties;
+ private final String[] gcCollectors;
+ private final String[] memoryPools;
+ private final String onError;
+ private final String onOutOfMemoryError;
+ private final String useCompressedOops;
+ private final String useG1GC;
- String version = "";
- String vmName = "";
- String vmVersion = "";
- String vmVendor = "";
+ private JvmInfo(long pid, String version, String vmName, String vmVersion, String vmVendor, long startTime,
+ long configuredInitialHeapSize, long configuredMaxHeapSize, Mem mem, String[] inputArguments, String bootClassPath,
+ String classPath, Map<String, String> systemProperties, String[] gcCollectors, String[] memoryPools, String onError,
+ String onOutOfMemoryError, String useCompressedOops, String useG1GC) {
+ this.pid = pid;
+ this.version = version;
+ this.vmName = vmName;
+ this.vmVersion = vmVersion;
+ this.vmVendor = vmVendor;
+ this.startTime = startTime;
+ this.configuredInitialHeapSize = configuredInitialHeapSize;
+ this.configuredMaxHeapSize = configuredMaxHeapSize;
+ this.mem = mem;
+ this.inputArguments = inputArguments;
+ this.bootClassPath = bootClassPath;
+ this.classPath = classPath;
+ this.systemProperties = systemProperties;
+ this.gcCollectors = gcCollectors;
+ this.memoryPools = memoryPools;
+ this.onError = onError;
+ this.onOutOfMemoryError = onOutOfMemoryError;
+ this.useCompressedOops = useCompressedOops;
+ this.useG1GC = useG1GC;
+ }
- long startTime = -1;
+ public JvmInfo(StreamInput in) throws IOException {
+ pid = in.readLong();
+ version = in.readString();
+ vmName = in.readString();
+ vmVersion = in.readString();
+ vmVendor = in.readString();
+ startTime = in.readLong();
+ inputArguments = new String[in.readInt()];
+ for (int i = 0; i < inputArguments.length; i++) {
+ inputArguments[i] = in.readString();
+ }
+ bootClassPath = in.readString();
+ classPath = in.readString();
+ systemProperties = in.readMap(StreamInput::readString, StreamInput::readString);
+ mem = new Mem(in);
+ gcCollectors = in.readStringArray();
+ memoryPools = in.readStringArray();
+ useCompressedOops = in.readString();
+ // the following members are only used locally for bootstrap checks, never serialized nor printed out
+ this.configuredMaxHeapSize = -1;
+ this.configuredInitialHeapSize = -1;
+ this.onError = null;
+ this.onOutOfMemoryError = null;
+ this.useG1GC = "unknown";
+ }
- private long configuredInitialHeapSize;
- private long configuredMaxHeapSize;
-
- Mem mem;
-
- String[] inputArguments;
-
- String bootClassPath;
-
- String classPath;
-
- Map<String, String> systemProperties;
-
- String[] gcCollectors = Strings.EMPTY_ARRAY;
- String[] memoryPools = Strings.EMPTY_ARRAY;
-
- private String onError;
-
- private String onOutOfMemoryError;
-
- private String useCompressedOops = "unknown";
-
- private String useG1GC = "unknown";
-
- private JvmInfo() {
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeLong(pid);
+ out.writeString(version);
+ out.writeString(vmName);
+ out.writeString(vmVersion);
+ out.writeString(vmVendor);
+ out.writeLong(startTime);
+ out.writeInt(inputArguments.length);
+ for (String inputArgument : inputArguments) {
+ out.writeString(inputArgument);
+ }
+ out.writeString(bootClassPath);
+ out.writeString(classPath);
+ out.writeVInt(this.systemProperties.size());
+ for (Map.Entry<String, String> entry : systemProperties.entrySet()) {
+ out.writeString(entry.getKey());
+ out.writeString(entry.getValue());
+ }
+ mem.writeTo(out);
+ out.writeStringArray(gcCollectors);
+ out.writeStringArray(memoryPools);
+ out.writeString(useCompressedOops);
}
/**
@@ -354,6 +415,14 @@ public class JvmInfo implements Streamable, ToXContent {
return this.useG1GC;
}
+ public String[] getGcCollectors() {
+ return gcCollectors;
+ }
+
+ public String[] getMemoryPools() {
+ return memoryPools;
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.JVM);
@@ -407,72 +476,37 @@ public class JvmInfo implements Streamable, ToXContent {
static final String USING_COMPRESSED_OOPS = "using_compressed_ordinary_object_pointers";
}
- public static JvmInfo readJvmInfo(StreamInput in) throws IOException {
- JvmInfo jvmInfo = new JvmInfo();
- jvmInfo.readFrom(in);
- return jvmInfo;
- }
+ public static class Mem implements Writeable {
- @Override
- public void readFrom(StreamInput in) throws IOException {
- pid = in.readLong();
- version = in.readString();
- vmName = in.readString();
- vmVersion = in.readString();
- vmVendor = in.readString();
- startTime = in.readLong();
- inputArguments = new String[in.readInt()];
- for (int i = 0; i < inputArguments.length; i++) {
- inputArguments[i] = in.readString();
+ private final long heapInit;
+ private final long heapMax;
+ private final long nonHeapInit;
+ private final long nonHeapMax;
+ private final long directMemoryMax;
+
+ public Mem(long heapInit, long heapMax, long nonHeapInit, long nonHeapMax, long directMemoryMax) {
+ this.heapInit = heapInit;
+ this.heapMax = heapMax;
+ this.nonHeapInit = nonHeapInit;
+ this.nonHeapMax = nonHeapMax;
+ this.directMemoryMax = directMemoryMax;
}
- bootClassPath = in.readString();
- classPath = in.readString();
- systemProperties = new HashMap<>();
- int size = in.readInt();
- for (int i = 0; i < size; i++) {
- systemProperties.put(in.readString(), in.readString());
+
+ public Mem(StreamInput in) throws IOException {
+ this.heapInit = in.readVLong();
+ this.heapMax = in.readVLong();
+ this.nonHeapInit = in.readVLong();
+ this.nonHeapMax = in.readVLong();
+ this.directMemoryMax = in.readVLong();
}
- mem = new Mem();
- mem.readFrom(in);
- gcCollectors = in.readStringArray();
- memoryPools = in.readStringArray();
- useCompressedOops = in.readString();
- }
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeLong(pid);
- out.writeString(version);
- out.writeString(vmName);
- out.writeString(vmVersion);
- out.writeString(vmVendor);
- out.writeLong(startTime);
- out.writeInt(inputArguments.length);
- for (String inputArgument : inputArguments) {
- out.writeString(inputArgument);
- }
- out.writeString(bootClassPath);
- out.writeString(classPath);
- out.writeInt(systemProperties.size());
- for (Map.Entry<String, String> entry : systemProperties.entrySet()) {
- out.writeString(entry.getKey());
- out.writeString(entry.getValue());
- }
- mem.writeTo(out);
- out.writeStringArray(gcCollectors);
- out.writeStringArray(memoryPools);
- out.writeString(useCompressedOops);
- }
-
- public static class Mem implements Streamable {
-
- long heapInit = 0;
- long heapMax = 0;
- long nonHeapInit = 0;
- long nonHeapMax = 0;
- long directMemoryMax = 0;
-
- Mem() {
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVLong(heapInit);
+ out.writeVLong(heapMax);
+ out.writeVLong(nonHeapInit);
+ out.writeVLong(nonHeapMax);
+ out.writeVLong(directMemoryMax);
}
public ByteSizeValue getHeapInit() {
@@ -494,23 +528,5 @@ public class JvmInfo implements Streamable, ToXContent {
public ByteSizeValue getDirectMemoryMax() {
return new ByteSizeValue(directMemoryMax);
}
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- heapInit = in.readVLong();
- heapMax = in.readVLong();
- nonHeapInit = in.readVLong();
- nonHeapMax = in.readVLong();
- directMemoryMax = in.readVLong();
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeVLong(heapInit);
- out.writeVLong(heapMax);
- out.writeVLong(nonHeapInit);
- out.writeVLong(nonHeapMax);
- out.writeVLong(directMemoryMax);
- }
}
}
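
The JvmInfo rewrite above is one instance of a pattern this patch applies repeatedly: Streamable (mutable fields, no-arg constructor plus readFrom) becomes Writeable (final fields, a StreamInput constructor as the deserialization path). A minimal sketch of the pattern with an invented class and illustrative fields:

import java.io.IOException;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

public class ExampleInfo implements Writeable {

    private final String name; // final is the point: the object is complete once constructed
    private final long value;

    public ExampleInfo(String name, long value) {
        this.name = name;
        this.value = value;
    }

    // replaces readFrom(StreamInput): deserialization is just another constructor
    public ExampleInfo(StreamInput in) throws IOException {
        this.name = in.readString();
        this.value = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name); // must mirror the read order exactly
        out.writeVLong(value);
    }
}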
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java
index 599755e78a4..af6ea851803 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/DummyOsInfo.java
@@ -21,13 +21,8 @@ package org.elasticsearch.monitor.os;
public class DummyOsInfo extends OsInfo {
- DummyOsInfo() {
- refreshInterval = 0;
- availableProcessors = 0;
- allocatedProcessors = 0;
- name = "dummy_name";
- arch = "dummy_arch";
- version = "dummy_version";
+ private DummyOsInfo() {
+ super(0, 0, 0, "dummy_name", "dummy_arch", "dummy_version");
}
public static final DummyOsInfo INSTANCE = new DummyOsInfo();
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java
index f0520358524..7a0175c31d1 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/OsInfo.java
@@ -21,25 +21,47 @@ package org.elasticsearch.monitor.os;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
-public class OsInfo implements Streamable, ToXContent {
+public class OsInfo implements Writeable, ToXContent {
- long refreshInterval;
+ private final long refreshInterval;
+ private final int availableProcessors;
+ private final int allocatedProcessors;
+ private final String name;
+ private final String arch;
+ private final String version;
- int availableProcessors;
+ public OsInfo(long refreshInterval, int availableProcessors, int allocatedProcessors, String name, String arch, String version) {
+ this.refreshInterval = refreshInterval;
+ this.availableProcessors = availableProcessors;
+ this.allocatedProcessors = allocatedProcessors;
+ this.name = name;
+ this.arch = arch;
+ this.version = version;
+ }
- int allocatedProcessors;
+ public OsInfo(StreamInput in) throws IOException {
+ this.refreshInterval = in.readLong();
+ this.availableProcessors = in.readInt();
+ this.allocatedProcessors = in.readInt();
+ this.name = in.readOptionalString();
+ this.arch = in.readOptionalString();
+ this.version = in.readOptionalString();
+ }
- String name = null;
- String arch = null;
- String version = null;
-
- OsInfo() {
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeLong(refreshInterval);
+ out.writeInt(availableProcessors);
+ out.writeInt(allocatedProcessors);
+ out.writeOptionalString(name);
+ out.writeOptionalString(arch);
+ out.writeOptionalString(version);
}
public long getRefreshInterval() {
@@ -95,30 +117,4 @@ public class OsInfo implements Streamable, ToXContent {
builder.endObject();
return builder;
}
-
- public static OsInfo readOsInfo(StreamInput in) throws IOException {
- OsInfo info = new OsInfo();
- info.readFrom(in);
- return info;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- refreshInterval = in.readLong();
- availableProcessors = in.readInt();
- allocatedProcessors = in.readInt();
- name = in.readOptionalString();
- arch = in.readOptionalString();
- version = in.readOptionalString();
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeLong(refreshInterval);
- out.writeInt(availableProcessors);
- out.writeInt(allocatedProcessors);
- out.writeOptionalString(name);
- out.writeOptionalString(arch);
- out.writeOptionalString(version);
- }
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
index 03720658e24..08abfc05f1d 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
@@ -163,13 +163,9 @@ public class OsProbe {
private OsProbe() {
}
- public OsInfo osInfo() {
- OsInfo info = new OsInfo();
- info.availableProcessors = Runtime.getRuntime().availableProcessors();
- info.name = Constants.OS_NAME;
- info.arch = Constants.OS_ARCH;
- info.version = Constants.OS_VERSION;
- return info;
+ public OsInfo osInfo(long refreshInterval, int allocatedProcessors) {
+ return new OsInfo(refreshInterval, Runtime.getRuntime().availableProcessors(),
+ allocatedProcessors, Constants.OS_NAME, Constants.OS_ARCH, Constants.OS_VERSION);
}
public OsStats osStats() {
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java
index 248b49f21cb..cb67eef852c 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java
@@ -27,32 +27,22 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.SingleObjectCache;
import org.elasticsearch.common.util.concurrent.EsExecutors;
-/**
- *
- */
public class OsService extends AbstractComponent {
private final OsProbe probe;
-
private final OsInfo info;
-
- private SingleObjectCache<OsStats> osStatsCache;
+ private final SingleObjectCache<OsStats> osStatsCache;
public static final Setting<TimeValue> REFRESH_INTERVAL_SETTING =
Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1),
- Property.NodeScope);
+ Property.NodeScope);
public OsService(Settings settings) {
super(settings);
this.probe = OsProbe.getInstance();
-
TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings);
-
- this.info = probe.osInfo();
- this.info.refreshInterval = refreshInterval.millis();
- this.info.allocatedProcessors = EsExecutors.boundedNumberOfProcessors(settings);
-
- osStatsCache = new OsStatsCache(refreshInterval, probe.osStats());
+ this.info = probe.osInfo(refreshInterval.millis(), EsExecutors.boundedNumberOfProcessors(settings));
+ this.osStatsCache = new OsStatsCache(refreshInterval, probe.osStats());
logger.debug("using refresh_interval [{}]", refreshInterval);
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java
index cf9c9e63b87..a0e3e7a70f2 100644
--- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java
+++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java
@@ -21,26 +21,35 @@ package org.elasticsearch.monitor.process;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
-public class ProcessInfo implements Streamable, ToXContent {
+public class ProcessInfo implements Writeable, ToXContent {
- long refreshInterval;
+ private final long refreshInterval;
+ private final long id;
+ private final boolean mlockall;
- private long id;
-
- private boolean mlockall;
-
- ProcessInfo() {
- }
-
- public ProcessInfo(long id, boolean mlockall) {
+ public ProcessInfo(long id, boolean mlockall, long refreshInterval) {
this.id = id;
this.mlockall = mlockall;
+ this.refreshInterval = refreshInterval;
+ }
+
+ public ProcessInfo(StreamInput in) throws IOException {
+ refreshInterval = in.readLong();
+ id = in.readLong();
+ mlockall = in.readBoolean();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeLong(refreshInterval);
+ out.writeLong(id);
+ out.writeBoolean(mlockall);
}
public long refreshInterval() {
@@ -79,24 +88,4 @@ public class ProcessInfo implements Streamable, ToXContent {
builder.endObject();
return builder;
}
-
- public static ProcessInfo readProcessInfo(StreamInput in) throws IOException {
- ProcessInfo info = new ProcessInfo();
- info.readFrom(in);
- return info;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- refreshInterval = in.readLong();
- id = in.readLong();
- mlockall = in.readBoolean();
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeLong(refreshInterval);
- out.writeLong(id);
- out.writeBoolean(mlockall);
- }
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java
index b19b54a9478..e4307f724c5 100644
--- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java
+++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessProbe.java
@@ -126,8 +126,8 @@ public class ProcessProbe {
return -1;
}
- public ProcessInfo processInfo() {
- return new ProcessInfo(jvmInfo().pid(), BootstrapInfo.isMemoryLocked());
+ public ProcessInfo processInfo(long refreshInterval) {
+ return new ProcessInfo(jvmInfo().pid(), BootstrapInfo.isMemoryLocked(), refreshInterval);
}
public ProcessStats processStats() {
diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java
index 38593534480..99593003b34 100644
--- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java
@@ -42,11 +42,9 @@ public final class ProcessService extends AbstractComponent {
public ProcessService(Settings settings) {
super(settings);
this.probe = ProcessProbe.getInstance();
-
final TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings);
processStatsCache = new ProcessStatsCache(refreshInterval, probe.processStats());
- this.info = probe.processInfo();
- this.info.refreshInterval = refreshInterval.millis();
+ this.info = probe.processInfo(refreshInterval.millis());
logger.debug("using refresh_interval [{}]", refreshInterval);
}
diff --git a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java
index 5dab19581a3..90b1d32f4ae 100644
--- a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java
+++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java
@@ -24,5 +24,6 @@ public class DummyPluginInfo extends PluginInfo {
super(name, description, version, classname);
}
- public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName");
+ public static final DummyPluginInfo INSTANCE = new DummyPluginInfo(
+ "dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName");
}
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java
index 500861d8999..3e241eadd37 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java
@@ -22,7 +22,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -32,7 +32,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;
-public class PluginInfo implements Streamable, ToXContent {
+public class PluginInfo implements Writeable, ToXContent {
public static final String ES_PLUGIN_PROPERTIES = "plugin-descriptor.properties";
public static final String ES_PLUGIN_POLICY = "plugin-security.policy";
@@ -45,13 +45,10 @@ public class PluginInfo implements Streamable, ToXContent {
static final String CLASSNAME = "classname";
}
- private String name;
- private String description;
- private String version;
- private String classname;
-
- public PluginInfo() {
- }
+ private final String name;
+ private final String description;
+ private final String version;
+ private final String classname;
/**
* Information about plugins
@@ -60,13 +57,28 @@ public class PluginInfo implements Streamable, ToXContent {
* @param description Its description
* @param version Version number
*/
- PluginInfo(String name, String description, String version, String classname) {
+ public PluginInfo(String name, String description, String version, String classname) {
this.name = name;
this.description = description;
this.version = version;
this.classname = classname;
}
+ public PluginInfo(StreamInput in) throws IOException {
+ this.name = in.readString();
+ this.description = in.readString();
+ this.version = in.readString();
+ this.classname = in.readString();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(name);
+ out.writeString(description);
+ out.writeString(version);
+ out.writeString(classname);
+ }
+
/** reads (and validates) plugin metadata descriptor file */
public static PluginInfo readFromProperties(Path dir) throws IOException {
Path descriptor = dir.resolve(ES_PLUGIN_PROPERTIES);
@@ -138,28 +150,6 @@ public class PluginInfo implements Streamable, ToXContent {
return version;
}
- public static PluginInfo readFromStream(StreamInput in) throws IOException {
- PluginInfo info = new PluginInfo();
- info.readFrom(in);
- return info;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- this.name = in.readString();
- this.description = in.readString();
- this.version = in.readString();
- this.classname = in.readString();
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeString(name);
- out.writeString(description);
- out.writeString(version);
- out.writeString(classname);
- }
-
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
index e13bdbec879..01704e9ed86 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
@@ -108,10 +108,9 @@ public class PluginsService extends AbstractComponent {
*/
public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory, Collection<Class<? extends Plugin>> classpathPlugins) {
super(settings);
- info = new PluginsAndModules();
List<Tuple<PluginInfo, Plugin>> pluginsLoaded = new ArrayList<>();
-
+ List<PluginInfo> pluginsList = new ArrayList<>();
// first we load plugins that are on the classpath. this is for tests and transport clients
for (Class<? extends Plugin> pluginClass : classpathPlugins) {
Plugin plugin = loadPlugin(pluginClass, settings);
@@ -120,9 +119,10 @@ public class PluginsService extends AbstractComponent {
logger.trace("plugin loaded from classpath [{}]", pluginInfo);
}
pluginsLoaded.add(new Tuple<>(pluginInfo, plugin));
- info.addPlugin(pluginInfo);
+ pluginsList.add(pluginInfo);
}
+ List<PluginInfo> modulesList = new ArrayList<>();
// load modules
if (modulesDirectory != null) {
try {
@@ -130,7 +130,7 @@ public class PluginsService extends AbstractComponent {
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
pluginsLoaded.addAll(loaded);
for (Tuple<PluginInfo, Plugin> module : loaded) {
- info.addModule(module.v1());
+ modulesList.add(module.v1());
}
} catch (IOException ex) {
throw new IllegalStateException("Unable to initialize modules", ex);
@@ -144,18 +144,19 @@ public class PluginsService extends AbstractComponent {
List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
pluginsLoaded.addAll(loaded);
for (Tuple<PluginInfo, Plugin> plugin : loaded) {
- info.addPlugin(plugin.v1());
+ pluginsList.add(plugin.v1());
}
} catch (IOException ex) {
throw new IllegalStateException("Unable to initialize plugins", ex);
}
}
- plugins = Collections.unmodifiableList(pluginsLoaded);
+ this.info = new PluginsAndModules(pluginsList, modulesList);
+ this.plugins = Collections.unmodifiableList(pluginsLoaded);
// We need to build a List of plugins for checking mandatory plugins
Set<String> pluginsNames = new HashSet<>();
- for (Tuple<PluginInfo, Plugin> tuple : plugins) {
+ for (Tuple<PluginInfo, Plugin> tuple : this.plugins) {
pluginsNames.add(tuple.v1().getName());
}
@@ -179,7 +180,7 @@ public class PluginsService extends AbstractComponent {
logPluginInfo(info.getPluginInfos(), "plugin", logger);
Map<Plugin, List<OnModuleReference>> onModuleReferences = new HashMap<>();
- for (Tuple<PluginInfo, Plugin> pluginEntry : plugins) {
+ for (Tuple<PluginInfo, Plugin> pluginEntry : this.plugins) {
Plugin plugin = pluginEntry.v2();
List<OnModuleReference> list = new ArrayList<>();
for (Method method : plugin.getClass().getMethods()) {
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java
index 7801aad8e0b..04d0bf57612 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java
@@ -50,7 +50,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
public static final ParseField TEXT = new ParseField("text");
public static final ParseField FIELD = new ParseField("field");
public static final ParseField TOKENIZER = new ParseField("tokenizer");
- public static final ParseField TOKEN_FILTERS = new ParseField("filter", "token_filter");
+ public static final ParseField TOKEN_FILTERS = new ParseField("filter");
public static final ParseField CHAR_FILTERS = new ParseField("char_filter");
public static final ParseField EXPLAIN = new ParseField("explain");
public static final ParseField ATTRIBUTES = new ParseField("attributes");
@@ -77,7 +77,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
if (request.hasParam("tokenizer")) {
analyzeRequest.tokenizer(request.param("tokenizer"));
}
- for (String filter : request.paramAsStringArray("filter", request.paramAsStringArray("token_filter", Strings.EMPTY_ARRAY))) {
+ for (String filter : request.paramAsStringArray("filter", Strings.EMPTY_ARRAY)) {
analyzeRequest.addTokenFilter(filter);
}
for (String charFilter : request.paramAsStringArray("char_filter", Strings.EMPTY_ARRAY)) {
@@ -144,7 +144,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
analyzeRequest.addTokenFilter(parser.map());
} else {
throw new IllegalArgumentException(currentFieldName
- + " array element should contain token_filter's name or setting");
+ + " array element should contain filter's name or setting");
}
}
} else if (parseFieldMatcher.match(currentFieldName, Fields.CHAR_FILTERS)
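
Only the REST parameter and body field change here; the Java API that this handler builds is untouched. A usage sketch with the request class from this handler (index, text, and analysis component names invented):

AnalyzeRequest analyzeRequest = new AnalyzeRequest("my_index")
    .text("Brown Foxes")          // text to analyze
    .tokenizer("standard")
    .addTokenFilter("lowercase"); // same builder method before and after this change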
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index 38fd73f73ec..d2ff4defc9e 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -27,7 +27,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.SizeValue;
@@ -529,18 +529,14 @@ public class ThreadPool extends AbstractComponent implements Closeable {
}
}
- public static class Info implements Streamable, ToXContent {
+ public static class Info implements Writeable, ToXContent {
- private String name;
- private ThreadPoolType type;
- private int min;
- private int max;
- private TimeValue keepAlive;
- private SizeValue queueSize;
-
- Info() {
-
- }
+ private final String name;
+ private final ThreadPoolType type;
+ private final int min;
+ private final int max;
+ private final TimeValue keepAlive;
+ private final SizeValue queueSize;
public Info(String name, ThreadPoolType type) {
this(name, type, -1);
@@ -559,6 +555,25 @@ public class ThreadPool extends AbstractComponent implements Closeable {
this.queueSize = queueSize;
}
+ public Info(StreamInput in) throws IOException {
+ name = in.readString();
+ type = ThreadPoolType.fromType(in.readString());
+ min = in.readInt();
+ max = in.readInt();
+ keepAlive = in.readOptionalWriteable(TimeValue::new);
+ queueSize = in.readOptionalWriteable(SizeValue::new);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(name);
+ out.writeString(type.getType());
+ out.writeInt(min);
+ out.writeInt(max);
+ out.writeOptionalWriteable(keepAlive);
+ out.writeOptionalWriteable(queueSize);
+ }
+
public String getName() {
return this.name;
}
@@ -585,46 +600,6 @@ public class ThreadPool extends AbstractComponent implements Closeable {
return this.queueSize;
}
- @Override
- public void readFrom(StreamInput in) throws IOException {
- name = in.readString();
- type = ThreadPoolType.fromType(in.readString());
- min = in.readInt();
- max = in.readInt();
- if (in.readBoolean()) {
- keepAlive = new TimeValue(in);
- }
- if (in.readBoolean()) {
- queueSize = SizeValue.readSizeValue(in);
- }
- in.readBoolean(); // here to conform with removed waitTime
- in.readBoolean(); // here to conform with removed rejected setting
- in.readBoolean(); // here to conform with queue type
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeString(name);
- out.writeString(type.getType());
- out.writeInt(min);
- out.writeInt(max);
- if (keepAlive == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- keepAlive.writeTo(out);
- }
- if (queueSize == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- queueSize.writeTo(out);
- }
- out.writeBoolean(false); // here to conform with removed waitTime
- out.writeBoolean(false); // here to conform with removed rejected setting
- out.writeBoolean(false); // here to conform with queue type
- }
-
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
@@ -654,7 +629,6 @@ public class ThreadPool extends AbstractComponent implements Closeable {
static final String KEEP_ALIVE = "keep_alive";
static final String QUEUE_SIZE = "queue_size";
}
-
}
/**
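
The keepAlive/queueSize handling above folds the hand-written null flag into writeOptionalWriteable/readOptionalWriteable, which put the same bytes on the wire. A fragment-level sketch of the equivalence (variables as in Info):

// old Streamable style: presence flag written by hand
if (keepAlive == null) {
    out.writeBoolean(false);
} else {
    out.writeBoolean(true);
    keepAlive.writeTo(out);
}

// new Writeable style: one call per side, same encoding
out.writeOptionalWriteable(keepAlive);
TimeValue restored = in.readOptionalWriteable(TimeValue::new); // null when the flag was false

Note that the new format also drops the three legacy placeholder booleans, so it is not wire-compatible with the old one.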
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java
index 729c6cb7364..70c0f2c9598 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPoolInfo.java
@@ -21,27 +21,30 @@ package org.elasticsearch.threadpool;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
-import java.util.ArrayList;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
-/**
- */
-public class ThreadPoolInfo implements Streamable, Iterable<ThreadPool.Info>, ToXContent {
-
- private List<ThreadPool.Info> infos;
-
- ThreadPoolInfo() {
- }
+public class ThreadPoolInfo implements Writeable, Iterable<ThreadPool.Info>, ToXContent {
+ private final List<ThreadPool.Info> infos;
public ThreadPoolInfo(List<ThreadPool.Info> infos) {
- this.infos = infos;
+ this.infos = Collections.unmodifiableList(infos);
+ }
+
+ public ThreadPoolInfo(StreamInput in) throws IOException {
+ this.infos = Collections.unmodifiableList(in.readList(ThreadPool.Info::new));
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeList(infos);
}
@Override
@@ -49,31 +52,6 @@ public class ThreadPoolInfo implements Streamable, Iterable<ThreadPool.Info>, To
return infos.iterator();
}
- public static ThreadPoolInfo readThreadPoolInfo(StreamInput in) throws IOException {
- ThreadPoolInfo info = new ThreadPoolInfo();
- info.readFrom(in);
- return info;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- int size = in.readVInt();
- infos = new ArrayList<>(size);
- for (int i = 0; i < size; i++) {
- ThreadPool.Info info = new ThreadPool.Info();
- info.readFrom(in);
- infos.add(info);
- }
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeVInt(infos.size());
- for (ThreadPool.Info info : infos) {
- info.writeTo(out);
- }
- }
-
static final class Fields {
static final String THREAD_POOL = "thread_pool";
}
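
The readList call above replaces the removed manual loop. Roughly, and only as a sketch of the StreamInput contract rather than its actual source, it expands to:

int size = in.readVInt();               // vint length prefix, matching writeList
List<ThreadPool.Info> infos = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
    infos.add(new ThreadPool.Info(in)); // the Writeable reader runs once per element
}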
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportInfo.java b/core/src/main/java/org/elasticsearch/transport/TransportInfo.java
index 236c0d50a98..fbabf49b65d 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportInfo.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportInfo.java
@@ -22,7 +22,7 @@ package org.elasticsearch.transport;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -31,56 +31,17 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-/**
- *
- */
-public class TransportInfo implements Streamable, ToXContent {
+public class TransportInfo implements Writeable, ToXContent {
private BoundTransportAddress address;
private Map<String, BoundTransportAddress> profileAddresses;
- TransportInfo() {
- }
-
public TransportInfo(BoundTransportAddress address, @Nullable Map<String, BoundTransportAddress> profileAddresses) {
this.address = address;
this.profileAddresses = profileAddresses;
}
- static final class Fields {
- static final String TRANSPORT = "transport";
- static final String BOUND_ADDRESS = "bound_address";
- static final String PUBLISH_ADDRESS = "publish_address";
- static final String PROFILES = "profiles";
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject(Fields.TRANSPORT);
- builder.array(Fields.BOUND_ADDRESS, (Object[]) address.boundAddresses());
- builder.field(Fields.PUBLISH_ADDRESS, address.publishAddress().toString());
- builder.startObject(Fields.PROFILES);
- if (profileAddresses != null && profileAddresses.size() > 0) {
- for (Map.Entry<String, BoundTransportAddress> entry : profileAddresses.entrySet()) {
- builder.startObject(entry.getKey());
- builder.array(Fields.BOUND_ADDRESS, (Object[]) entry.getValue().boundAddresses());
- builder.field(Fields.PUBLISH_ADDRESS, entry.getValue().publishAddress().toString());
- builder.endObject();
- }
- }
- builder.endObject();
- builder.endObject();
- return builder;
- }
-
- public static TransportInfo readTransportInfo(StreamInput in) throws IOException {
- TransportInfo info = new TransportInfo();
- info.readFrom(in);
- return info;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
+ public TransportInfo(StreamInput in) throws IOException {
address = BoundTransportAddress.readBoundTransportAddress(in);
int size = in.readVInt();
if (size > 0) {
@@ -109,6 +70,32 @@ public class TransportInfo implements Streamable, ToXContent {
}
}
+ static final class Fields {
+ static final String TRANSPORT = "transport";
+ static final String BOUND_ADDRESS = "bound_address";
+ static final String PUBLISH_ADDRESS = "publish_address";
+ static final String PROFILES = "profiles";
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(Fields.TRANSPORT);
+ builder.array(Fields.BOUND_ADDRESS, (Object[]) address.boundAddresses());
+ builder.field(Fields.PUBLISH_ADDRESS, address.publishAddress().toString());
+ builder.startObject(Fields.PROFILES);
+ if (profileAddresses != null && profileAddresses.size() > 0) {
+ for (Map.Entry<String, BoundTransportAddress> entry : profileAddresses.entrySet()) {
+ builder.startObject(entry.getKey());
+ builder.array(Fields.BOUND_ADDRESS, (Object[]) entry.getValue().boundAddresses());
+ builder.field(Fields.PUBLISH_ADDRESS, entry.getValue().publishAddress().toString());
+ builder.endObject();
+ }
+ }
+ builder.endObject();
+ builder.endObject();
+ return builder;
+ }
+
public BoundTransportAddress address() {
return address;
}
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java
index cbfc14a7d04..5a2b8a5a143 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java
@@ -19,25 +19,25 @@
package org.elasticsearch.bootstrap;
+import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.util.Constants;
-import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.io.PathUtils;
+import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.logging.TestLoggers;
import org.elasticsearch.test.ESTestCase;
-import org.hamcrest.Matcher;
-import org.mockito.ArgumentMatcher;
+import org.elasticsearch.test.MockLogAppender;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.function.Predicate;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.argThat;
-import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
@@ -52,8 +52,7 @@ public class MaxMapCountCheckTests extends ESTestCase {
}
}
- @SuppressLoggerChecks(reason = "mock usage")
- public void testGetMaxMapCount() throws IOException {
+ public void testGetMaxMapCount() throws IOException, IllegalAccessException {
final long procSysVmMaxMapCount = randomIntBetween(1, Integer.MAX_VALUE);
final BufferedReader reader = mock(BufferedReader.class);
when(reader.readLine()).thenReturn(Long.toString(procSysVmMaxMapCount));
@@ -69,31 +68,92 @@ public class MaxMapCountCheckTests extends ESTestCase {
assertThat(check.getMaxMapCount(), equalTo(procSysVmMaxMapCount));
verify(reader).close();
- reset(reader);
- final IOException ioException = new IOException("fatal");
- when(reader.readLine()).thenThrow(ioException);
- final Logger logger = mock(Logger.class);
- assertThat(check.getMaxMapCount(logger), equalTo(-1L));
- verify(logger).warn(argThat(argumentMatcher("I/O exception while trying to read [/proc/sys/vm/max_map_count]")), eq(ioException));
- verify(reader).close();
+ {
+ reset(reader);
+ final IOException ioException = new IOException("fatal");
+ when(reader.readLine()).thenThrow(ioException);
+ final Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountIOException");
+ final MockLogAppender appender = new MockLogAppender();
+ appender.addExpectation(
+ new ParameterizedMessageLoggingExpectation(
+ "expected logged I/O exception",
+ "testGetMaxMapCountIOException",
+ Level.WARN,
+ "I/O exception while trying to read [{}]",
+ new Object[] { procSysVmMaxMapCountPath },
+ e -> ioException == e));
+ TestLoggers.addAppender(logger, appender);
+ assertThat(check.getMaxMapCount(logger), equalTo(-1L));
+ appender.assertAllExpectationsMatched();
+ verify(reader).close();
+ TestLoggers.removeAppender(logger, appender);
+ }
+
+ {
+ reset(reader);
+ when(reader.readLine()).thenReturn("eof");
+ final Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountNumberFormatException");
+ final MockLogAppender appender = new MockLogAppender();
+ appender.addExpectation(
+ new ParameterizedMessageLoggingExpectation(
+ "expected logged number format exception",
+ "testGetMaxMapCountNumberFormatException",
+ Level.WARN,
+ "unable to parse vm.max_map_count [{}]",
+ new Object[] { "eof" },
+ e -> e instanceof NumberFormatException && e.getMessage().equals("For input string: \"eof\"")));
+ TestLoggers.addAppender(logger, appender);
+ assertThat(check.getMaxMapCount(logger), equalTo(-1L));
+ appender.assertAllExpectationsMatched();
+ verify(reader).close();
+ TestLoggers.removeAppender(logger, appender);
+ }
- reset(reader);
- reset(logger);
- when(reader.readLine()).thenReturn("eof");
- assertThat(check.getMaxMapCount(logger), equalTo(-1L));
- verify(logger).warn(argThat(argumentMatcher("unable to parse vm.max_map_count [eof]")), any(NumberFormatException.class));
- verify(reader).close();
}
- private ArgumentMatcher<Supplier<?>> argumentMatcher(final String message) {
- return new ArgumentMatcher<Supplier<?>>() {
- @Override
- public boolean matches(Object o) {
- final Supplier<?> supplier = (Supplier<?>) o;
- final ParameterizedMessage parameterizedMessage = (ParameterizedMessage) supplier.get();
- return parameterizedMessage.getFormattedMessage().equals(message);
+ private static class ParameterizedMessageLoggingExpectation implements MockLogAppender.LoggingExpectation {
+
+ private boolean saw = false;
+
+ private final String name;
+ private final String loggerName;
+ private final Level level;
+ private final String messagePattern;
+ private final Object[] arguments;
+ private final Predicate throwablePredicate;
+
+ private ParameterizedMessageLoggingExpectation(
+ final String name,
+ final String loggerName,
+ final Level level,
+ final String messagePattern,
+ final Object[] arguments,
+ final Predicate throwablePredicate) {
+ this.name = name;
+ this.loggerName = loggerName;
+ this.level = level;
+ this.messagePattern = messagePattern;
+ this.arguments = arguments;
+ this.throwablePredicate = throwablePredicate;
+ }
+
+ @Override
+ public void match(LogEvent event) {
+ if (event.getLevel().equals(level) &&
+ event.getLoggerName().equals(loggerName) &&
+ event.getMessage() instanceof ParameterizedMessage) {
+ final ParameterizedMessage message = (ParameterizedMessage)event.getMessage();
+ saw = message.getFormat().equals(messagePattern) &&
+ Arrays.deepEquals(arguments, message.getParameters()) &&
+ throwablePredicate.test(event.getThrown());
}
- };
+ }
+
+ @Override
+ public void assertMatched() {
+ assertTrue(name, saw);
+ }
+
}
public void testMaxMapCountCheckRead() throws IOException {
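
When the pattern/arguments/throwable triple checked above is more than a test needs, the stock expectation that matches on the formatted message may suffice. A sketch, assuming MockLogAppender.SeenEventExpectation with its (name, logger, level, message) constructor:

Logger logger = ESLoggerFactory.getLogger("my.test.logger");
MockLogAppender appender = new MockLogAppender();
appender.addExpectation(new MockLogAppender.SeenEventExpectation(
    "expected warning", "my.test.logger", Level.WARN, "unable to parse vm.max_map_count [eof]"));
TestLoggers.addAppender(logger, appender);
try {
    logger.warn("unable to parse vm.max_map_count [{}]", "eof");
    appender.assertAllExpectationsMatched();
} finally {
    TestLoggers.removeAppender(logger, appender); // always detach the test appender
}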
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
index 45c89062a5a..3d35c42adcf 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
@@ -103,8 +103,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
@Before
public void initIndexesList() throws Exception {
- indexes = OldIndexUtils.loadIndexesList("index", getBwcIndicesPath());
- unsupportedIndexes = OldIndexUtils.loadIndexesList("unsupported", getBwcIndicesPath());
+ indexes = OldIndexUtils.loadDataFilesList("index", getBwcIndicesPath());
+ unsupportedIndexes = OldIndexUtils.loadDataFilesList("unsupported", getBwcIndicesPath());
}
@AfterClass
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index 7e69003a1ea..26bb97fcc04 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -330,6 +330,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
}
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/20318")
public void testLoggerLevelUpdate() {
assertAcked(prepareCreate("test"));
final IllegalArgumentException e =
diff --git a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
index b075e9d56d7..5104e56cb35 100644
--- a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
+++ b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
@@ -20,9 +20,13 @@
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.MatcherAssert;
+import java.io.IOException;
+
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@@ -165,4 +169,15 @@ public class ByteSizeValueTests extends ESTestCase {
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
}
}
+
+ public void testSerialization() throws IOException {
+ ByteSizeValue byteSizeValue = new ByteSizeValue(randomPositiveLong(), randomFrom(ByteSizeUnit.values()));
+ try (BytesStreamOutput out = new BytesStreamOutput()) {
+ byteSizeValue.writeTo(out);
+ try (StreamInput in = out.bytes().streamInput()) {
+ ByteSizeValue deserializedByteSizeValue = new ByteSizeValue(in);
+ assertEquals(byteSizeValue, deserializedByteSizeValue);
+ }
+ }
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
index 4d962fb6c88..989c60fc916 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
@@ -594,6 +594,12 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
* Tests that shadow replicas can be "naturally" rebalanced and relocated
* around the cluster. By "naturally" I mean without using the reroute API
*/
+ // This test failed on CI when trying to assert that all the shard data has been deleted
+ // from the index path. It has not been reproduced locally. Despite the IndicesService
+ // deleting the index and hence deleting all the shard data for the index, the test
+ // failure still showed some Lucene files in the data directory for that index. Not sure
+ // why that is, so turning on more logging here.
+ @TestLogging("indices:TRACE,env:TRACE")
public void testShadowReplicaNaturalRelocation() throws Exception {
Path dataPath = createTempDir();
Settings nodeSettings = nodeSettings(dataPath);
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 0e9d7aeb396..7b62966515d 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -53,6 +53,7 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
@@ -206,7 +207,8 @@ public class InternalEngineTests extends ESTestCase {
return new EngineConfig(openMode, config.getShardId(), config.getThreadPool(), config.getIndexSettings(), config.getWarmer(),
config.getStore(), config.getDeletionPolicy(), config.getMergePolicy(), config.getAnalyzer(), config.getSimilarity(),
new CodecService(null, logger), config.getEventListener(), config.getTranslogRecoveryPerformer(), config.getQueryCache(),
- config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getRefreshListeners());
+ config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getRefreshListeners(),
+ config.getMaxUnsafeAutoIdTimestamp());
}
@Override
@@ -276,7 +278,7 @@ public class InternalEngineTests extends ESTestCase {
}
protected InternalEngine createEngine(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy) throws IOException {
- EngineConfig config = config(indexSettings, store, translogPath, mergePolicy);
+ EngineConfig config = config(indexSettings, store, translogPath, mergePolicy, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
InternalEngine internalEngine = new InternalEngine(config);
if (config.getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) {
internalEngine.recoverFromTranslog();
@@ -284,7 +286,7 @@ public class InternalEngineTests extends ESTestCase {
return internalEngine;
}
- public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy) {
+ public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy, long maxUnsafeAutoIdTimestamp) {
IndexWriterConfig iwc = newIndexWriterConfig();
TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE);
final EngineConfig.OpenMode openMode;
@@ -306,7 +308,7 @@ public class InternalEngineTests extends ESTestCase {
EngineConfig config = new EngineConfig(openMode, shardId, threadPool, indexSettings, null, store, createSnapshotDeletionPolicy(),
mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), listener,
new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(),
- IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), null);
+ IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), null, maxUnsafeAutoIdTimestamp);
return config;
}
@@ -903,7 +905,7 @@ public class InternalEngineTests extends ESTestCase {
public void testSyncedFlush() throws IOException {
try (Store store = createStore();
Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(),
- new LogByteSizeMergePolicy()))) {
+ new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.index(new Engine.Index(newUid("1"), doc));
@@ -930,7 +932,7 @@ public class InternalEngineTests extends ESTestCase {
for (int i = 0; i < iters; i++) {
try (Store store = createStore();
InternalEngine engine = new InternalEngine(config(defaultSettings, store, createTempDir(),
- new LogDocMergePolicy()))) {
+ new LogDocMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
Engine.Index doc1 = new Engine.Index(newUid("1"), doc);
@@ -1158,7 +1160,7 @@ public class InternalEngineTests extends ESTestCase {
public void testForceMerge() throws IOException {
try (Store store = createStore();
Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(),
- new LogByteSizeMergePolicy()))) { // use log MP here we test some behavior in ESMP
+ new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) { // use log MP here we test some behavior in ESMP
int numDocs = randomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null);
@@ -1601,7 +1603,7 @@ public class InternalEngineTests extends ESTestCase {
public void testEnableGcDeletes() throws Exception {
try (Store store = createStore();
- Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), newMergePolicy()))) {
+ Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) {
engine.config().setEnableGcDeletes(false);
// Add document
@@ -1737,7 +1739,7 @@ public class InternalEngineTests extends ESTestCase {
// expected
}
// now it should be OK.
- EngineConfig config = copy(config(defaultSettings, store, primaryTranslogDir, newMergePolicy()), EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG);
+ EngineConfig config = copy(config(defaultSettings, store, primaryTranslogDir, newMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP), EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG);
engine = new InternalEngine(config);
}
@@ -2057,7 +2059,7 @@ public class InternalEngineTests extends ESTestCase {
config.getIndexSettings(), null, store, createSnapshotDeletionPolicy(), newMergePolicy(), config.getAnalyzer(),
config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getTranslogRecoveryPerformer(),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig,
- TimeValue.timeValueMinutes(5), config.getRefreshListeners());
+ TimeValue.timeValueMinutes(5), config.getRefreshListeners(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
try {
InternalEngine internalEngine = new InternalEngine(brokenConfig);
@@ -2112,7 +2114,7 @@ public class InternalEngineTests extends ESTestCase {
public void testCurrentTranslogIDisCommitted() throws IOException {
try (Store store = createStore()) {
- EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy());
+ EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
// create
{
@@ -2374,6 +2376,22 @@ public class InternalEngineTests extends ESTestCase {
assertTrue(engine.indexWriterHasDeletions());
}
+ public void testEngineMaxTimestampIsInitialized() throws IOException {
+ try (Store store = createStore();
+ Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE,
+ IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP))) {
+ assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp());
+
+ }
+
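+ // a timestamp handed to the engine config must be reflected in the segment stats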
+ long maxTimestamp = randomPositiveLong(); // Math.abs(randomLong()) overflows for Long.MIN_VALUE
+ try (Store store = createStore();
+ Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE,
+ maxTimestamp))) {
+ assertEquals(maxTimestamp, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp());
+ }
+ }
+
public void testAppendConcurrently() throws InterruptedException, IOException {
Thread[] thread = new Thread[randomIntBetween(3, 5)];
int numDocs = randomIntBetween(1000, 10000);
diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
index 7d2c6d6de41..6dea774f258 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
@@ -38,6 +38,7 @@ import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
@@ -247,7 +248,7 @@ public class ShadowEngineTests extends ESTestCase {
EngineConfig config = new EngineConfig(openMode, shardId, threadPool, indexSettings, null, store, createSnapshotDeletionPolicy(),
mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), eventListener, null,
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig,
- TimeValue.timeValueMinutes(5), refreshListeners);
+ TimeValue.timeValueMinutes(5), refreshListeners, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
return config;
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index be77ba00734..ce49f18ccfc 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -26,15 +26,16 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
+import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
-import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.SynonymQuery;
-import org.apache.lucene.search.PrefixQuery;
-import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.unit.Fuzziness;
@@ -390,6 +391,32 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStringQueryBuilder> {
+ public void testFuzziness() throws Exception {
+ assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
+
+ int length = randomIntBetween(1, 10);
+ StringBuilder queryString = new StringBuilder();
+ for (int i = 0; i < length; i++) {
+ queryString.append("a");
+ }
+ queryString.append("~");
+
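+ // Fuzziness.AUTO scales the allowed edits with the term length:
+ // lengths 1-2 must match exactly, 3-5 allow one edit, longer terms allow two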
+ int expectedEdits;
+ if (length <= 2) {
+ expectedEdits = 0;
+ } else if (3 <= length && length <= 5) {
+ expectedEdits = 1;
+ } else {
+ expectedEdits = 2;
+ }
+
+ Query query = queryStringQuery(queryString.toString()).defaultField(STRING_FIELD_NAME).fuzziness(Fuzziness.AUTO)
+ .toQuery(createShardContext());
+ assertThat(query, instanceOf(FuzzyQuery.class));
+ FuzzyQuery fuzzyQuery = (FuzzyQuery) query;
+ assertEquals(expectedEdits, fuzzyQuery.getMaxEdits());
+ }
+
public void testFuzzyNumeric() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
QueryStringQueryBuilder query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME);
diff --git a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java
index a3bfa957def..ec794091a42 100644
--- a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java
@@ -110,7 +110,7 @@ import static org.hamcrest.Matchers.equalTo;
public abstract class ESIndexLevelReplicationTestCase extends ESTestCase {
protected ThreadPool threadPool;
- private final Index index = new Index("test", "uuid");
+ protected final Index index = new Index("test", "uuid");
private final ShardId shardId = new ShardId(index, 0);
private final Map<String, String> indexMapping = Collections.singletonMap("type", "{ \"type\": {} }");
protected static final PeerRecoveryTargetService.RecoveryListener recoveryListener = new PeerRecoveryTargetService.RecoveryListener() {
@@ -284,8 +284,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase {
primary.recoverFromStore();
primary.updateRoutingEntry(ShardRoutingHelper.moveToStarted(primary.routingEntry()));
for (IndexShard replicaShard : replicas) {
- recoverReplica(replicaShard,
- (replica, sourceNode) -> new RecoveryTarget(replica, sourceNode, recoveryListener, version -> {}));
+ recoverReplica(replicaShard);
}
}
@@ -294,6 +293,11 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase {
replicas.add(replica);
return replica;
}
+
+ public void recoverReplica(IndexShard replica) throws IOException {
+ recoverReplica(replica, (r, sourceNode) -> new RecoveryTarget(r, sourceNode, recoveryListener, version -> {}));
+ }
+
public void recoverReplica(IndexShard replica, BiFunction<IndexShard, DiscoveryNode, RecoveryTarget> targetSupplier)
throws IOException {
recoverReplica(replica, targetSupplier, true);
diff --git a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
index 3bbd8c3bbcc..407b374a92e 100644
--- a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
+++ b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
@@ -18,9 +18,13 @@
*/
package org.elasticsearch.index.replication;
+import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.InternalEngine;
import org.elasticsearch.index.engine.InternalEngineTests;
+import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardTests;
import org.elasticsearch.index.store.Store;
@@ -96,4 +100,25 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
}
}
+ public void testInheritMaxValidAutoIDTimestampOnRecovery() throws Exception {
+ try (ReplicationGroup shards = createGroup(0)) {
+ shards.startAll();
+ final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}");
+ indexRequest.onRetry(); // force an update of the timestamp
+ final IndexResponse response = shards.index(indexRequest);
+ assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
+ if (randomBoolean()) { // lets check if that also happens if no translog record is replicated
+ shards.flush();
+ }
+ IndexShard replica = shards.addReplica();
+ shards.recoverReplica(replica);
+
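+ // the replica must have inherited the primary's max unsafe auto-id timestamp during recovery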
+ SegmentsStats segmentsStats = replica.segmentStats(false);
+ SegmentsStats primarySegmentStats = shards.getPrimary().segmentStats(false);
+ assertNotEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, primarySegmentStats.getMaxUnsafeAutoIdTimestamp());
+ assertEquals(primarySegmentStats.getMaxUnsafeAutoIdTimestamp(), segmentsStats.getMaxUnsafeAutoIdTimestamp());
+ assertNotEquals(Long.MAX_VALUE, segmentsStats.getMaxUnsafeAutoIdTimestamp());
+ }
+ }
+
}
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index 3c7b09a980d..b4725a8506d 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -1611,7 +1611,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes())));
newShard.prepareForIndexRecovery();
newShard.recoveryState().getTranslog().totalOperations(operations.size());
- newShard.skipTranslogRecovery();
+ newShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
newShard.performBatchRecovery(operations);
assertFalse(newShard.getTranslog().syncNeeded());
}
@@ -1668,7 +1668,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
List<Translog.Operation> operations = new ArrayList<>();
operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes())));
newShard.prepareForIndexRecovery();
- newShard.skipTranslogRecovery();
+ newShard.skipTranslogRecovery(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
// Shard is still inactive since we haven't started recovering yet
assertFalse(newShard.isActive());
newShard.performBatchRecovery(operations);
diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java
index 20fd02b5163..f3f15b2639c 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java
@@ -29,6 +29,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
+import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@@ -36,7 +37,6 @@ import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.codec.CodecService;
@@ -66,7 +66,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
-import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
@@ -125,7 +124,7 @@ public class RefreshListenersTests extends ESTestCase {
store, new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()), newMergePolicy(), iwc.getAnalyzer(),
iwc.getSimilarity(), new CodecService(null, logger), eventListener, new TranslogHandler(shardId.getIndexName(), logger),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig,
- TimeValue.timeValueMinutes(5), listeners);
+ TimeValue.timeValueMinutes(5), listeners, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
engine = new InternalEngine(config);
}
diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java
index 82b264ae1d6..131593cd114 100644
--- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmInfoTests.java
@@ -57,5 +57,4 @@ public class JvmInfoTests extends ESTestCase {
final int index = argline.lastIndexOf(flag);
return argline.charAt(index - 1) == '+';
}
-
}
diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java
index 2a80fe39732..2e085a80700 100644
--- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java
+++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java
@@ -32,23 +32,27 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class OsProbeTests extends ESTestCase {
- OsProbe probe = OsProbe.getInstance();
+ private final OsProbe probe = OsProbe.getInstance();
public void testOsInfo() {
- OsInfo info = probe.osInfo();
+ int allocatedProcessors = randomIntBetween(1, Runtime.getRuntime().availableProcessors());
+ long refreshInterval = randomBoolean() ? -1 : randomPositiveLong();
+ OsInfo info = probe.osInfo(refreshInterval, allocatedProcessors);
assertNotNull(info);
- assertThat(info.getRefreshInterval(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L)));
- assertThat(info.getName(), equalTo(Constants.OS_NAME));
- assertThat(info.getArch(), equalTo(Constants.OS_ARCH));
- assertThat(info.getVersion(), equalTo(Constants.OS_VERSION));
- assertThat(info.getAvailableProcessors(), equalTo(Runtime.getRuntime().availableProcessors()));
+ assertEquals(refreshInterval, info.getRefreshInterval());
+ assertEquals(Constants.OS_NAME, info.getName());
+ assertEquals(Constants.OS_ARCH, info.getArch());
+ assertEquals(Constants.OS_VERSION, info.getVersion());
+ assertEquals(allocatedProcessors, info.getAllocatedProcessors());
+ assertEquals(Runtime.getRuntime().availableProcessors(), info.getAvailableProcessors());
}
public void testOsStats() {
OsStats stats = probe.osStats();
assertNotNull(stats);
assertThat(stats.getTimestamp(), greaterThan(0L));
- assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1), is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100)))));
+ assertThat(stats.getCpu().getPercent(), anyOf(equalTo((short) -1),
+ is(both(greaterThanOrEqualTo((short) 0)).and(lessThanOrEqualTo((short) 100)))));
double[] loadAverage = stats.getCpu().getLoadAverage();
if (loadAverage != null) {
assertThat(loadAverage.length, equalTo(3));
diff --git a/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java
index 8e6016f6f98..5423242ccd8 100644
--- a/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java
+++ b/core/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java
@@ -33,14 +33,15 @@ import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class ProcessProbeTests extends ESTestCase {
- ProcessProbe probe = ProcessProbe.getInstance();
+ private final ProcessProbe probe = ProcessProbe.getInstance();
public void testProcessInfo() {
- ProcessInfo info = probe.processInfo();
+ long refreshInterval = randomPositiveLong();
+ ProcessInfo info = probe.processInfo(refreshInterval);
assertNotNull(info);
- assertThat(info.getRefreshInterval(), greaterThanOrEqualTo(0L));
- assertThat(info.getId(), equalTo(jvmInfo().pid()));
- assertThat(info.isMlockall(), equalTo(BootstrapInfo.isMemoryLocked()));
+ assertEquals(refreshInterval, info.getRefreshInterval());
+ assertEquals(jvmInfo().pid(), info.getId());
+ assertEquals(BootstrapInfo.isMemoryLocked(), info.isMlockall());
}
public void testProcessStats() {
diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
index 7cd4e355218..d9134ba5cf3 100644
--- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
+++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
@@ -20,7 +20,6 @@
package org.elasticsearch.nodesinfo;
import org.elasticsearch.Build;
-import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -35,11 +34,11 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.ingest.IngestInfo;
+import org.elasticsearch.ingest.ProcessorInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
-import org.elasticsearch.monitor.os.DummyOsInfo;
import org.elasticsearch.monitor.os.OsInfo;
import org.elasticsearch.monitor.process.ProcessInfo;
-import org.elasticsearch.plugins.DummyPluginInfo;
+import org.elasticsearch.plugins.PluginInfo;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
@@ -48,7 +47,6 @@ import org.elasticsearch.transport.TransportInfo;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -58,25 +56,20 @@ import static java.util.Collections.emptySet;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.core.IsEqual.equalTo;
-/**
- *
- */
public class NodeInfoStreamingTests extends ESTestCase {
public void testNodeInfoStreaming() throws IOException {
NodeInfo nodeInfo = createNodeInfo();
- Version version = Version.CURRENT;
- BytesStreamOutput out = new BytesStreamOutput();
- out.setVersion(version);
- nodeInfo.writeTo(out);
- out.close();
- StreamInput in = out.bytes().streamInput();
- in.setVersion(version);
- NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in);
- assertExpectedUnchanged(nodeInfo, readNodeInfo);
-
+ try (BytesStreamOutput out = new BytesStreamOutput()) {
+ nodeInfo.writeTo(out);
+ try (StreamInput in = out.bytes().streamInput()) {
+ NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in);
+ assertExpectedUnchanged(nodeInfo, readNodeInfo);
+ }
+ }
}
- // checks all properties that are expected to be unchanged. Once we start changing them between versions this method has to be changed as well
+ // checks all properties that are expected to be unchanged.
+ // Once we start changing them between versions this method has to be changed as well
private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException {
assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString()));
assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname()));
@@ -89,24 +82,15 @@ public class NodeInfoStreamingTests extends ESTestCase {
compareJsonOutput(nodeInfo.getTransport(), readNodeInfo.getTransport());
compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode());
compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs());
- comparePluginsAndModules(nodeInfo, readNodeInfo);
+ compareJsonOutput(nodeInfo.getPlugins(), readNodeInfo.getPlugins());
compareJsonOutput(nodeInfo.getIngest(), readNodeInfo.getIngest());
}
- private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException {
- ToXContent.Params params = ToXContent.EMPTY_PARAMS;
- XContentBuilder pluginsAndModules = jsonBuilder();
- pluginsAndModules.startObject();
- nodeInfo.getPlugins().toXContent(pluginsAndModules, params);
- pluginsAndModules.endObject();
- XContentBuilder readPluginsAndModules = jsonBuilder();
- readPluginsAndModules.startObject();
- readNodeInfo.getPlugins().toXContent(readPluginsAndModules, params);
- readPluginsAndModules.endObject();
- assertThat(pluginsAndModules.string(), equalTo(readPluginsAndModules.string()));
- }
-
private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOException {
+ if (param1 == null) {
+ assertNull(param2);
+ return;
+ }
ToXContent.Params params = ToXContent.EMPTY_PARAMS;
XContentBuilder param1Builder = jsonBuilder();
param1Builder.startObject();
@@ -120,36 +104,73 @@ public class NodeInfoStreamingTests extends ESTestCase {
assertThat(param1Builder.string(), equalTo(param2Builder.string()));
}
- private NodeInfo createNodeInfo() {
+ private static NodeInfo createNodeInfo() {
Build build = Build.CURRENT;
DiscoveryNode node = new DiscoveryNode("test_node", LocalTransportAddress.buildUnique(),
emptyMap(), emptySet(), VersionUtils.randomVersion(random()));
- Map<String, String> serviceAttributes = new HashMap<>();
- serviceAttributes.put("test", "attribute");
- Settings settings = Settings.builder().put("test", "setting").build();
- OsInfo osInfo = DummyOsInfo.INSTANCE;
- ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean());
- JvmInfo jvm = JvmInfo.jvmInfo();
- List<ThreadPool.Info> threadPoolInfos = new ArrayList<>();
- threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5));
- ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos);
+ Settings settings = randomBoolean() ? null : Settings.builder().put("test", "setting").build();
+ OsInfo osInfo = null;
+ if (randomBoolean()) {
+ int availableProcessors = randomIntBetween(1, 64);
+ int allocatedProcessors = randomIntBetween(1, availableProcessors);
+ long refreshInterval = randomBoolean() ? -1 : randomPositiveLong();
+ String name = randomAsciiOfLengthBetween(3, 10);
+ String arch = randomAsciiOfLengthBetween(3, 10);
+ String version = randomAsciiOfLengthBetween(3, 10);
+ osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version);
+ }
+ ProcessInfo process = randomBoolean() ? null : new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong());
+ JvmInfo jvm = randomBoolean() ? null : JvmInfo.jvmInfo();
+ ThreadPoolInfo threadPoolInfo = null;
+ if (randomBoolean()) {
+ int numThreadPools = randomIntBetween(1, 10);
+ List<ThreadPool.Info> threadPoolInfos = new ArrayList<>(numThreadPools);
+ for (int i = 0; i < numThreadPools; i++) {
+ threadPoolInfos.add(new ThreadPool.Info(randomAsciiOfLengthBetween(3, 10),
+ randomFrom(ThreadPool.ThreadPoolType.values()), randomInt()));
+ }
+ threadPoolInfo = new ThreadPoolInfo(threadPoolInfos);
+ }
Map<String, BoundTransportAddress> profileAddresses = new HashMap<>();
- BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique());
+ BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(
+ new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique());
profileAddresses.put("test_address", dummyBoundTransportAddress);
- TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses);
- HttpInfo htttpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong());
- PluginsAndModules plugins = new PluginsAndModules();
- plugins.addModule(DummyPluginInfo.INSTANCE);
- plugins.addPlugin(DummyPluginInfo.INSTANCE);
- IngestInfo ingestInfo = new IngestInfo(Collections.emptyList());
- ByteSizeValue indexingBuffer;
- if (random().nextBoolean()) {
- indexingBuffer = null;
- } else {
+ TransportInfo transport = randomBoolean() ? null : new TransportInfo(dummyBoundTransportAddress, profileAddresses);
+ HttpInfo httpInfo = randomBoolean() ? null : new HttpInfo(dummyBoundTransportAddress, randomLong());
+
+ PluginsAndModules pluginsAndModules = null;
+ if (randomBoolean()) {
+ int numPlugins = randomIntBetween(0, 5);
+ List<PluginInfo> plugins = new ArrayList<>();
+ for (int i = 0; i < numPlugins; i++) {
+ plugins.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10),
+ randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10)));
+ }
+ int numModules = randomIntBetween(0, 5);
+ List<PluginInfo> modules = new ArrayList<>();
+ for (int i = 0; i < numModules; i++) {
+ modules.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10),
+ randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10)));
+ }
+ pluginsAndModules = new PluginsAndModules(plugins, modules);
+ }
+
+ IngestInfo ingestInfo = null;
+ if (randomBoolean()) {
+ int numProcessors = randomIntBetween(0, 5);
+ List<ProcessorInfo> processors = new ArrayList<>(numProcessors);
+ for (int i = 0; i < numProcessors; i++) {
+ processors.add(new ProcessorInfo(randomAsciiOfLengthBetween(3, 10)));
+ }
+ ingestInfo = new IngestInfo(processors);
+ }
+
+ ByteSizeValue indexingBuffer = null;
+ if (randomBoolean()) {
// pick a random long that sometimes exceeds an int:
indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L<<40)-1));
}
return new NodeInfo(VersionUtils.randomVersion(random()), build, node, settings, osInfo, process, jvm,
- threadPoolInfo, transport, htttpInfo, plugins, ingestInfo, indexingBuffer);
+ threadPoolInfo, transport, httpInfo, pluginsAndModules, ingestInfo, indexingBuffer);
}
}
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
index 73b31b92637..4ad52be8866 100644
--- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
@@ -23,8 +23,9 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.test.ESTestCase;
-import java.nio.file.Files;
import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -201,15 +202,17 @@ public class PluginInfoTests extends ESTestCase {
}
public void testPluginListSorted() {
- PluginsAndModules pluginsInfo = new PluginsAndModules();
- pluginsInfo.addPlugin(new PluginInfo("c", "foo", "dummy", "dummyclass"));
- pluginsInfo.addPlugin(new PluginInfo("b", "foo", "dummy", "dummyclass"));
- pluginsInfo.addPlugin(new PluginInfo("e", "foo", "dummy", "dummyclass"));
- pluginsInfo.addPlugin(new PluginInfo("a", "foo", "dummy", "dummyclass"));
- pluginsInfo.addPlugin(new PluginInfo("d", "foo", "dummy", "dummyclass"));
+ List<PluginInfo> plugins = new ArrayList<>();
+ plugins.add(new PluginInfo("c", "foo", "dummy", "dummyclass"));
+ plugins.add(new PluginInfo("b", "foo", "dummy", "dummyclass"));
+ plugins.add(new PluginInfo("e", "foo", "dummy", "dummyclass"));
+ plugins.add(new PluginInfo("a", "foo", "dummy", "dummyclass"));
+ plugins.add(new PluginInfo("d", "foo", "dummy", "dummyclass"));
+ PluginsAndModules pluginsInfo = new PluginsAndModules(plugins, Collections.emptyList());
+
final List<PluginInfo> infos = pluginsInfo.getPluginInfos();
- List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
+ List<String> names = infos.stream().map(PluginInfo::getName).collect(Collectors.toList());
assertThat(names, contains("a", "b", "c", "d", "e"));
}
}
diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java
index de188ba9e91..9b7d4073d0d 100644
--- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java
@@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
@@ -90,14 +89,10 @@ public class RestAnalyzeActionTests extends ESTestCase {
public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception {
AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
-
- try {
- RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- fail("shouldn't get here");
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), equalTo("Failed to parse request body"));
- }
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+ () -> RestAnalyzeAction.buildFromContent(
+ new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)));
+ assertThat(e.getMessage(), equalTo("Failed to parse request body"));
}
public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception {
@@ -107,14 +102,9 @@ public class RestAnalyzeActionTests extends ESTestCase {
.field("text", "THIS IS A TEST")
.field("unknown", "keyword")
.endObject().bytes();
-
- try {
- RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- fail("shouldn't get here");
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
- }
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+ () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)));
+ assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
}
public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception {
@@ -123,64 +113,57 @@ public class RestAnalyzeActionTests extends ESTestCase {
.startObject()
.field("explain", "fals")
.endObject().bytes();
- try {
- RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- fail("shouldn't get here");
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'"));
- }
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+ () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)));
+ assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'"));
}
public void testDeprecatedParamException() throws Exception {
- BytesReference content = XContentFactory.jsonBuilder()
- .startObject()
- .field("text", "THIS IS A TEST")
- .field("tokenizer", "keyword")
- .array("filters", "lowercase")
- .endObject().bytes();
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+ () -> RestAnalyzeAction.buildFromContent(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .field("text", "THIS IS A TEST")
+ .field("tokenizer", "keyword")
+ .array("filters", "lowercase")
+ .endObject().bytes(),
+ new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+ assertThat(e.getMessage(), startsWith("Unknown parameter [filters]"));
- AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
- try {
- RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), startsWith("Unknown parameter [filters]"));
- }
+ e = expectThrows(IllegalArgumentException.class,
+ () -> RestAnalyzeAction.buildFromContent(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .field("text", "THIS IS A TEST")
+ .field("tokenizer", "keyword")
+ .array("token_filters", "lowercase")
+ .endObject().bytes(),
+ new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+ assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]"));
- content = XContentFactory.jsonBuilder()
- .startObject()
- .field("text", "THIS IS A TEST")
- .field("tokenizer", "keyword")
- .array("token_filters", "lowercase")
- .endObject().bytes();
- analyzeRequest = new AnalyzeRequest("for test");
-
- try {
- RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]"));
- }
-
- content = XContentFactory.jsonBuilder()
- .startObject()
- .field("text", "THIS IS A TEST")
- .field("tokenizer", "keyword")
- .array("char_filters", "lowercase")
- .endObject().bytes();
-
- analyzeRequest = new AnalyzeRequest("for test");
-
- try {
- RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
- } catch (Exception e) {
- assertThat(e, instanceOf(IllegalArgumentException.class));
- assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]"));
- }
+ e = expectThrows(IllegalArgumentException.class,
+ () -> RestAnalyzeAction.buildFromContent(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .field("text", "THIS IS A TEST")
+ .field("tokenizer", "keyword")
+ .array("char_filters", "lowercase")
+ .endObject().bytes(),
+ new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+ assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]"));
+ e = expectThrows(IllegalArgumentException.class,
+ () -> RestAnalyzeAction.buildFromContent(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .field("text", "THIS IS A TEST")
+ .field("tokenizer", "keyword")
+ .array("token_filter", "lowercase")
+ .endObject().bytes()
+ , new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+ assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]"));
}
}
diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
index 14cf10b8f31..bf0f5f6e606 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
@@ -40,11 +40,8 @@ import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
-/**
- *
- */
public class ThreadPoolSerializationTests extends ESTestCase {
- BytesStreamOutput output = new BytesStreamOutput();
+ private final BytesStreamOutput output = new BytesStreamOutput();
private ThreadPool.ThreadPoolType threadPoolType;
@Before
@@ -54,13 +51,13 @@ public class ThreadPoolSerializationTests extends ESTestCase {
}
public void testThatQueueSizeSerializationWorks() throws Exception {
- ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("10k"));
+ ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10,
+ TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("10k"));
output.setVersion(Version.CURRENT);
info.writeTo(output);
StreamInput input = output.bytes().streamInput();
- ThreadPool.Info newInfo = new ThreadPool.Info();
- newInfo.readFrom(input);
+ ThreadPool.Info newInfo = new ThreadPool.Info(input);
assertThat(newInfo.getQueueSize().singles(), is(10000L));
}
@@ -71,8 +68,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
info.writeTo(output);
StreamInput input = output.bytes().streamInput();
- ThreadPool.Info newInfo = new ThreadPool.Info();
- newInfo.readFrom(input);
+ ThreadPool.Info newInfo = new ThreadPool.Info(input);
assertThat(newInfo.getQueueSize(), is(nullValue()));
}
@@ -103,7 +99,8 @@ public class ThreadPoolSerializationTests extends ESTestCase {
}
public void testThatToXContentWritesInteger() throws Exception {
- ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("1k"));
+ ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10,
+ TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("1k"));
XContentBuilder builder = jsonBuilder();
builder.startObject();
info.toXContent(builder, ToXContent.EMPTY_PARAMS);
@@ -126,8 +123,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
info.writeTo(output);
StreamInput input = output.bytes().streamInput();
- ThreadPool.Info newInfo = new ThreadPool.Info();
- newInfo.readFrom(input);
+ ThreadPool.Info newInfo = new ThreadPool.Info(input);
assertThat(newInfo.getThreadPoolType(), is(threadPoolType));
}
diff --git a/docs/plugins/lang-javascript.asciidoc b/docs/plugins/lang-javascript.asciidoc
index 4bc6ba94ed2..650f42d9cab 100644
--- a/docs/plugins/lang-javascript.asciidoc
+++ b/docs/plugins/lang-javascript.asciidoc
@@ -1,7 +1,7 @@
[[lang-javascript]]
=== JavaScript Language Plugin
-deprecated[5.0.0,JavaScript will be replaced by the new scripting language <<modules-scripting-painless,Painless>>]
+deprecated[5.0.0,JavaScript will be replaced by the new scripting language {ref}/modules-scripting-painless.html[`Painless`]]
The JavaScript language plugin enables the use of JavaScript in Elasticsearch
scripts, via Mozilla's
diff --git a/docs/plugins/lang-python.asciidoc b/docs/plugins/lang-python.asciidoc
index 4ad0fe628cb..fcec532ea10 100644
--- a/docs/plugins/lang-python.asciidoc
+++ b/docs/plugins/lang-python.asciidoc
@@ -1,7 +1,7 @@
[[lang-python]]
=== Python Language Plugin
-deprecated[5.0.0,Python will be replaced by the new scripting language <<modules-scripting-painless,Painless>>]
+deprecated[5.0.0,Python will be replaced by the new scripting language {ref}/modules-scripting-painless.html[`Painless`]]
The Python language plugin enables the use of Python in Elasticsearch
scripts, via the http://www.jython.org/[Jython] Java implementation of Python.
diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc
index bb5a367a04c..50187af5b28 100644
--- a/docs/reference/how-to/indexing-speed.asciidoc
+++ b/docs/reference/how-to/indexing-speed.asciidoc
@@ -67,6 +67,15 @@ The filesystem cache will be used in order to buffer I/O operations. You should
make sure to give at least half the memory of the machine running elasticsearch
to the filesystem cache.
+[float]
+=== Use auto-generated ids
+
+When indexing a document that has an explicit id, Elasticsearch needs to check
+whether a document with the same id already exists within the same shard, which
+is a costly operation and gets even more costly as the index grows. By using
+auto-generated ids, Elasticsearch can skip this check, which makes indexing
+faster.
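+
+For instance, the following request lets Elasticsearch generate the id instead
+of requiring the client to provide one:
+
+[source,js]
+--------------------------------------------------
+POST index/type
+{
+  "foo": "bar"
+}
+--------------------------------------------------
+// CONSOLE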
+
[float]
=== Use faster hardware
diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc
index 67848c9edca..2d0525a48e8 100644
--- a/docs/reference/how-to/search-speed.asciidoc
+++ b/docs/reference/how-to/search-speed.asciidoc
@@ -140,6 +140,124 @@ being mapped as <<keyword,`keyword`>> rather than `integer` or `long`.
In general, scripts should be avoided. If they are absolutely needed, you
should prefer the `painless` and `expressions` engines.
+[float]
+=== Search rounded dates
+
+Queries on date fields that use `now` are typically not cacheable since the
+range that is being matched changes all the time. However, switching to a
+rounded date is often acceptable in terms of user experience, and has the
+benefit of making better use of the query cache.
+
+For instance, the following query:
+
+[source,js]
+--------------------------------------------------
+PUT index/type/1
+{
+ "my_date": "2016-05-11T16:30:55.328Z"
+}
+
+GET index/_search
+{
+ "query": {
+ "constant_score": {
+ "filter": {
+ "range": {
+ "my_date": {
+ "gte": "now-1h",
+ "lte": "now"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+
+could be replaced with the following query:
+
+[source,js]
+--------------------------------------------------
+GET index/_search
+{
+ "query": {
+ "constant_score": {
+ "filter": {
+ "range": {
+ "my_date": {
+ "gte": "now-1h/m",
+ "lte": "now/m"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+In that case we rounded to the minute, so if the current time is `16:31:29`,
+the range query will match every document whose `my_date` field value falls
+between `15:31:00` and `16:31:59`. If several users run a query that
+contains this range in the same minute, the query cache could help speed things
+up a bit. The longer the interval that is used for rounding, the more the query
+cache can help, but beware that overly aggressive rounding might also hurt user
+experience.
+
+
+NOTE: It might be tempting to split ranges into a large cacheable part and
+smaller non-cacheable parts in order to be able to leverage the query cache,
+as shown below:
+
+[source,js]
+--------------------------------------------------
+GET index/_search
+{
+ "query": {
+ "constant_score": {
+ "filter": {
+ "bool": {
+ "should": [
+ {
+ "range": {
+ "my_date": {
+ "gte": "now-1h",
+ "lte": "now-1h/m"
+ }
+ }
+ },
+ {
+ "range": {
+ "my_date": {
+ "gt": "now-1h/m",
+ "lt": "now/m"
+ }
+ }
+ },
+ {
+ "range": {
+ "my_date": {
+ "gte": "now/m",
+ "lte": "now"
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+However, such a practice might make the query run slower in some cases since the
+overhead introduced by the `bool` query may defeat the savings from better
+leveraging the query cache.
+
[float]
=== Force-merge read-only indices
diff --git a/docs/reference/images/lambda_calc.png b/docs/reference/images/lambda_calc.png
index 2d7f8bbb8db..4fd19a2660f 100644
Binary files a/docs/reference/images/lambda_calc.png and b/docs/reference/images/lambda_calc.png differ
diff --git a/docs/reference/images/sigma_calc.png b/docs/reference/images/sigma_calc.png
index d794c0a69d0..9001bbe9eaf 100644
Binary files a/docs/reference/images/sigma_calc.png and b/docs/reference/images/sigma_calc.png differ
diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc
index 5516d898813..dbb2c8f101a 100644
--- a/docs/reference/indices/analyze.asciidoc
+++ b/docs/reference/indices/analyze.asciidoc
@@ -43,13 +43,13 @@ curl -XGET 'localhost:9200/_analyze' -d '
curl -XGET 'localhost:9200/_analyze' -d '
{
"tokenizer" : "keyword",
- "token_filter" : ["lowercase"],
+ "filter" : ["lowercase"],
"char_filter" : ["html_strip"],
"text" : "this is a test"
}'
--------------------------------------------------
-deprecated[5.0.0, Use `filter`/`token_filter`/`char_filter` instead of `filters`/`token_filters`/`char_filters`]
+deprecated[5.0.0, Use `filter`/`char_filter` instead of `filters`/`char_filters`; `token_filters` has been removed]
Custom tokenizers, token filters, and character filters can be specified in the request body as follows:
@@ -112,7 +112,7 @@ provided it doesn't start with `{` :
[source,js]
--------------------------------------------------
-curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&token_filter=lowercase&char_filter=html_strip' -d 'this is a test'
+curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase&char_filter=html_strip' -d 'this is a test'
--------------------------------------------------
=== Explain Analyze
diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc
index 5e3f744ebaf..6ee28e7b2f4 100644
--- a/docs/reference/indices/rollover-index.asciidoc
+++ b/docs/reference/indices/rollover-index.asciidoc
@@ -55,9 +55,9 @@ The above request might return the following response:
}
--------------------------------------------------
// TESTRESPONSE
- <1> Whether the index was rolled over.
- <2> Whether the rollover was dry run.
- <3> The result of each condition.
+<1> Whether the index was rolled over.
+<2> Whether the rollover was dry run.
+<3> The result of each condition.
[float]
=== Naming the new index
diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc
index 713f2149547..027cf8b924d 100644
--- a/docs/reference/indices/shrink-index.asciidoc
+++ b/docs/reference/indices/shrink-index.asciidoc
@@ -41,6 +41,8 @@ PUT /my_source_index/_settings
}
}
--------------------------------------------------
+// CONSOLE
+// TEST[s/^/PUT my_source_index\n/]
<1> Forces the relocation of a copy of each shard to the node with name
`shrink_node_name`. See <<shard-allocation-filtering>> for more options.
@@ -62,6 +64,8 @@ the following request:
--------------------------------------------------
POST my_source_index/_shrink/my_target_index
--------------------------------------------------
+// CONSOLE
+// TEST[continued]
The above request returns immediately once the target index has been added to
the cluster state -- it doesn't wait for the shrink operation to start.
@@ -105,6 +109,8 @@ POST my_source_index/_shrink/my_target_index
}
}
--------------------------------------------------
+// CONSOLE
+// TEST[s/^/PUT my_source_index\n{"settings": {"index.blocks.write": true}}\n/]
<1> The number of shards in the target index. This must be a factor of the
number of shards in the source index.
@@ -139,6 +145,6 @@ replicas and may decide to relocate the primary shard to another node.
[float]
=== Wait For Active Shards
-Because the shrink operation creates a new index to shrink the shards to,
-the <<create-index-wait-for-active-shards,wait for active shards>> setting
+Because the shrink operation creates a new index to shrink the shards to,
+the <<create-index-wait-for-active-shards,wait for active shards>> setting
on index creation applies to the shrink index action as well.
diff --git a/docs/reference/indices/stats.asciidoc b/docs/reference/indices/stats.asciidoc
index e990a7ff6bd..a95b1c81ae1 100644
--- a/docs/reference/indices/stats.asciidoc
+++ b/docs/reference/indices/stats.asciidoc
@@ -10,15 +10,18 @@ all indices:
[source,js]
--------------------------------------------------
-curl localhost:9200/_stats
+GET /_stats
--------------------------------------------------
+// CONSOLE
Specific index stats can be retrieved using:
[source,js]
--------------------------------------------------
-curl localhost:9200/index1,index2/_stats
+GET /index1,index2/_stats
--------------------------------------------------
+// CONSOLE
+// TEST[s/^/PUT index1\nPUT index2\n/]
By default, all stats are returned, returning only specific stats can be
specified as well in the URI. Those stats can be any of:
@@ -74,12 +77,14 @@ Here are some samples:
[source,js]
--------------------------------------------------
# Get back stats for merge and refresh only for all indices
-curl 'localhost:9200/_stats/merge,refresh'
+GET /_stats/merge,refresh
# Get back stats for type1 and type2 documents for the my_index index
-curl 'localhost:9200/my_index/_stats/indexing?types=type1,type2
+GET /my_index/_stats/indexing?types=type1,type2
# Get back just search stats for group1 and group2
-curl 'localhost:9200/_stats/search?groups=group1,group2
+GET /_stats/search?groups=group1,group2
--------------------------------------------------
+// CONSOLE
+// TEST[s/^/PUT my_index\n/]
The stats returned are aggregated on the index level, with
`primaries` and `total` aggregations, where `primaries` are the values for only the
@@ -91,4 +96,3 @@ Note, as shards move around the cluster, their stats will be cleared as
they are created on other nodes. On the other hand, even though a shard
"left" a node, that node will still retain the stats that shard
contributed to.
-
diff --git a/docs/reference/indices/templates.asciidoc b/docs/reference/indices/templates.asciidoc
index 754a93e3096..6e2f7ce91f1 100644
--- a/docs/reference/indices/templates.asciidoc
+++ b/docs/reference/indices/templates.asciidoc
@@ -38,6 +38,7 @@ PUT _template/template_1
}
--------------------------------------------------
// CONSOLE
+// TESTSETUP
Defines a template named template_1, with a template pattern of `te*`.
The settings and mappings will be applied to any index name that matches
@@ -47,7 +48,7 @@ It is also possible to include aliases in an index template as follows:
[source,js]
--------------------------------------------------
-curl -XPUT localhost:9200/_template/template_1 -d '
+PUT _template/template_1
{
"template" : "te*",
"settings" : {
@@ -64,8 +65,9 @@ curl -XPUT localhost:9200/_template/template_1 -d '
"{index}-alias" : {} <1>
}
}
-'
--------------------------------------------------
+// CONSOLE
+// TEST[s/^/DELETE _template\/template_1\n/]
<1> the `{index}` placeholder within the alias name will be replaced with the
actual index name that the template gets applied to during index creation.
@@ -79,8 +81,9 @@ Index templates are identified by a name (in the above case
[source,js]
--------------------------------------------------
-curl -XDELETE localhost:9200/_template/template_1
+DELETE /_template/template_1
--------------------------------------------------
+// CONSOLE
[float]
[[getting]]
@@ -91,24 +94,26 @@ Index templates are identified by a name (in the above case
[source,js]
--------------------------------------------------
-curl -XGET localhost:9200/_template/template_1
+GET /_template/template_1
--------------------------------------------------
+// CONSOLE
You can also match several templates by using wildcards like:
[source,js]
--------------------------------------------------
-curl -XGET localhost:9200/_template/temp*
-curl -XGET localhost:9200/_template/template_1,template_2
+GET /_template/temp*
+GET /_template/template_1,template_2
--------------------------------------------------
+// CONSOLE
To get a list of all index templates you can run:
[source,js]
--------------------------------------------------
-curl -XGET localhost:9200/_template/
+GET /_template
--------------------------------------------------
-
+// CONSOLE
[float]
[[indices-templates-exists]]
@@ -118,13 +123,13 @@ Used to check if the template exists or not. For example:
[source,js]
-----------------------------------------------
-curl -XHEAD -i localhost:9200/_template/template_1
+HEAD _template/template_1
-----------------------------------------------
+// CONSOLE
The HTTP status code indicates if the template with the given name
exists or not. A status code `200` means it exists, and `404` means it does not.
-
[float]
[[multiple-templates]]
=== Multiple Template Matching
@@ -137,7 +142,7 @@ orders overriding them. For example:
[source,js]
--------------------------------------------------
-curl -XPUT localhost:9200/_template/template_1 -d '
+PUT /_template/template_1
{
"template" : "*",
"order" : 0,
@@ -150,9 +155,8 @@ curl -XPUT localhost:9200/_template/template_1 -d '
}
}
}
-'
-curl -XPUT localhost:9200/_template/template_2 -d '
+PUT /_template/template_2
{
"template" : "te*",
"order" : 1,
@@ -165,8 +169,9 @@ curl -XPUT localhost:9200/_template/template_2 -d '
}
}
}
-'
--------------------------------------------------
+// CONSOLE
+// TEST[s/^/DELETE _template\/template_1\n/]
The above will disable storing the `_source` on all `type1` types, but
for indices that start with `te*`, source will still be enabled.
diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc
index f0c0e9f6c13..a06e3b9e1df 100644
--- a/docs/reference/ingest/ingest-node.asciidoc
+++ b/docs/reference/ingest/ingest-node.asciidoc
@@ -1495,3 +1495,115 @@ Converts a string to its uppercase equivalent.
}
}
--------------------------------------------------
+
+[[dot-expand-processor]]
+=== Dot Expander Processor
+
+Expands a field with dots into an object field. This processor allows fields
+with dots in the name to be accessible by other processors in the pipeline.
+Otherwise these fields can't be accessed by any processor.
+
+[[dot-expander-options]]
+.Dot Expander Options
+[options="header"]
+|======
+| Name | Required | Default | Description
+| `field` | yes | - | The field to expand into an object field
+| `path`  | no       | -       | The field that contains the field to expand. Only required if the field to expand is part of another object field, because the `field` option can only understand leaf fields.
+|======
+
+[source,js]
+--------------------------------------------------
+{
+ "dot_expander": {
+ "field": "foo.bar"
+ }
+}
+--------------------------------------------------
+
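+The `path` option comes into play when the field with dots is itself nested
+inside another object field. A sketch with illustrative field names, where
+the dotted key `bar.baz` lives under an object field named `foo`:
+
+[source,js]
+--------------------------------------------------
+{
+  "dot_expander": {
+    "path": "foo",
+    "field": "bar.baz"
+  }
+}
+--------------------------------------------------
+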
+For example, with the `foo.bar` configuration shown first, the `dot_expander`
+processor would turn this document:
+
+[source,js]
+--------------------------------------------------
+{
+ "foo.bar" : "value"
+}
+--------------------------------------------------
+
+into:
+
+[source,js]
+--------------------------------------------------
+{
+ "foo" : {
+ "bar" : "value"
+ }
+}
+--------------------------------------------------
+
+If there is already a `bar` field nested under `foo` then
+this processor merges the `foo.bar` field into it. If the existing field is
+a scalar value then this processor turns it into an array field.
+
+For example, the following document:
+
+[source,js]
+--------------------------------------------------
+{
+ "foo.bar" : "value2",
+ "foo" : {
+ "bar" : "value1"
+ }
+}
+--------------------------------------------------
+
+is transformed by the `dot_expander` processor into:
+
+[source,js]
+--------------------------------------------------
+{
+ "foo" : {
+ "bar" : ["value1", "value2"]
+ }
+}
+--------------------------------------------------
+
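+This merge behavior can be checked quickly with the simulate API. The
+following request is a sketch; the pipeline and document are illustrative:
+
+[source,js]
+--------------------------------------------------
+POST _ingest/pipeline/_simulate
+{
+  "pipeline" : {
+    "processors" : [
+      {
+        "dot_expander" : {
+          "field" : "foo.bar"
+        }
+      }
+    ]
+  },
+  "docs" : [
+    {
+      "_source" : {
+        "foo.bar" : "value2",
+        "foo" : {
+          "bar" : "value1"
+        }
+      }
+    }
+  ]
+}
+--------------------------------------------------
+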
+If any field outside of the leaf field conflicts with a pre-existing field of the same name,
+then that field needs to be renamed first.
+
+Consider the following document:
+
+[source,js]
+--------------------------------------------------
+{
+ "foo": "value1",
+ "foo.bar": "value2"
+}
+--------------------------------------------------
+
+Then the `foo` field needs to be renamed before the `dot_expander`
+processor is applied. So in order for the `foo.bar` field to be properly
+expanded into the `bar` field under the `foo` field, the following
+pipeline should be used:
+
+[source,js]
+--------------------------------------------------
+{
+ "processors" : [
+ {
+ "rename" : {
+ "field" : "foo",
+ "target_field" : "foo.bar""
+ }
+ },
+ {
+ "dot_expander": {
+ "field": "foo.bar"
+ }
+ }
+ ]
+}
+--------------------------------------------------
+
+The reason for this is that Ingest doesn't know how to automatically cast
+a scalar field to an object field.
\ No newline at end of file
diff --git a/docs/reference/migration/migrate_5_0/rest.asciidoc b/docs/reference/migration/migrate_5_0/rest.asciidoc
index 6df1341b0cc..afba8819ed3 100644
--- a/docs/reference/migration/migrate_5_0/rest.asciidoc
+++ b/docs/reference/migration/migrate_5_0/rest.asciidoc
@@ -67,8 +67,8 @@ removed in Elasticsearch 6.0.0.
==== Analyze API changes
-The deprecated `filters`/`token_filters`/`char_filters` parameter has been
-renamed `filter`/`token_filter`/`char_filter`.
+The `filters` and `char_filters` parameters have been renamed `filter` and `char_filter`.
+The `token_filters` parameter has been removed. Use `filter` instead.
==== `DELETE /_query` endpoint removed
diff --git a/docs/reference/migration/migrate_5_0/suggest.asciidoc b/docs/reference/migration/migrate_5_0/suggest.asciidoc
index 0b67711fe00..6979e8718e3 100644
--- a/docs/reference/migration/migrate_5_0/suggest.asciidoc
+++ b/docs/reference/migration/migrate_5_0/suggest.asciidoc
@@ -3,7 +3,8 @@
The completion suggester has undergone a complete rewrite. This means that the
syntax and data structure for fields of type `completion` have changed, as
-have the syntax and response of completion suggester requests.
+have the syntax and response of completion suggester requests. See
+<> for details.
For indices created before Elasticsearch 5.0.0, `completion` fields and the
completion suggester will continue to work as they did in Elasticsearch 2.x.
@@ -25,35 +26,17 @@ to suggestion entries for both context and completion suggesters.
==== Completion suggester is document-oriented
-Suggestions are aware of the document they belong to. This enables
-retrieving any field value from the document. This is exposed
-through the query-time `payload` option in `completion` and `context`
-suggesters:
+Suggestions are aware of the document they belong to. Now, associated
+documents (`_source`) are returned as part of `completion` suggestions.
-[source,sh]
----------------
-GET /my_index/_search
-{
- "suggest": {
- "fooSuggestion": {
- "text": "f"
- "completion": {
- "field": "fooSuggest",
- "payload": ["field1", "field2"]
- }
- }
- }
-}
----------------
+IMPORTANT: The `_source` meta-field must be enabled (the default behavior)
+for `_source` to be returned with suggestions.
Previously, `context` and `completion` suggesters supported an index-time
`payloads` option, which was used to store and return metadata with suggestions.
-Now metadata can be stored as a field in the same document as the
-suggestion for enabling retrieval at query-time. The support for
-index-time `payloads` has been removed to avoid bloating the in-memory
-index with suggestion metadata. The time that it takes to retrieve payloads
-depends heavily on the size of the `_source` field. The smaller the `_source`,
-the faster the retrieval.
+Now metadata can be stored as part of the same document as the
+suggestion for retrieval at query-time. The support for index-time `payloads`
+has been removed to avoid bloating the in-memory index with suggestion metadata.
==== Simpler completion indexing
diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc
index 68ba9696aa0..3072372d179 100644
--- a/docs/reference/query-dsl/function-score-query.asciidoc
+++ b/docs/reference/query-dsl/function-score-query.asciidoc
@@ -359,6 +359,7 @@ image:images/Gaussian.png[]
where image:images/sigma.png[] is computed to assure that the score takes the value `decay` at distance `scale` from `origin`+-`offset`
+// \sigma^2 = -scale^2/(2 \cdot ln(decay))
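+// e.g. scale = 10, decay = 0.5: \sigma^2 = -10^2/(2 \cdot ln(0.5)) \approx 72.1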
image:images/sigma_calc.png[]
See <> for graphs demonstrating the curve generated by the `gauss` function.
@@ -374,6 +375,7 @@ image:images/Exponential.png[]
where again the parameter image:images/lambda.png[] is computed to assure that the score takes the value `decay` at distance `scale` from `origin`+-`offset`
+// \lambda = ln(decay)/scale
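+// e.g. scale = 10, decay = 0.5: \lambda = ln(0.5)/10 \approx -0.069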
image:images/lambda_calc.png[]
See <> for graphs demonstrating the curve generated by the `exp` function.
diff --git a/docs/reference/search/suggesters/completion-suggest.asciidoc b/docs/reference/search/suggesters/completion-suggest.asciidoc
index 587cdf86bd7..60065ce96bb 100644
--- a/docs/reference/search/suggesters/completion-suggest.asciidoc
+++ b/docs/reference/search/suggesters/completion-suggest.asciidoc
@@ -194,26 +194,50 @@ returns this response:
--------------------------------------------------
// TESTRESPONSE
-The configured weight for a suggestion is returned as `_score`.
-The `text` field uses the `input` of your indexed suggestion.
-Suggestions are document oriented, the document source is
-returned in `_source`. <>
-parameters are supported for filtering the document source.
+
+IMPORTANT: The `_source` meta-field must be enabled (the default
+behavior) for `_source` to be returned with suggestions.
+
+The configured weight for a suggestion is returned as `_score`. The
+`text` field uses the `input` of your indexed suggestion. Suggestions
+return the full document `_source` by default. The size of the `_source`
+can impact performance due to disk fetch and network transport overhead.
+For best performance, filter out unnecessary fields from the `_source`
+using <> to minimize
+`_source` size. The following demonstrates an example completion query
+with source filtering:
+
+[source,js]
+--------------------------------------------------
+POST music/_suggest
+{
+ "_source": "completion.*",
+ "song-suggest" : {
+ "prefix" : "nir",
+ "completion" : {
+ "field" : "suggest"
+ }
+ }
+}
+--------------------------------------------------
The basic completion suggester query supports the following parameters:
`field`:: The name of the field on which to run the query (required).
`size`:: The number of suggestions to return (defaults to `5`).
-`payload`:: The name of the field or field name array to be returned
- as payload (defaults to no fields).
NOTE: The completion suggester considers all documents in the index.
See <> for an explanation of how to query a subset of
documents instead.
-NOTE: Specifying `payload` fields will incur additional search performance
-hit. The `payload` fields are retrieved eagerly (single pass) for top
-suggestions at the shard level using field data or from doc values.
+NOTE: When a completion query spans more than one shard, the suggest is
+executed in two phases, where the last phase fetches the relevant documents
+from the shards. Executing completion requests against a single shard is
+therefore more performant, because it avoids the document fetch overhead of a
+multi-shard suggest. To get the best performance for completions, it is
+recommended to index completions into a single-shard index. If shard size
+causes high heap usage, it is still recommended to break the index into
+multiple shards instead of optimizing for completion performance.
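+
+For example, a dedicated single-shard index for completions could be created
+as follows (a sketch; the `music` index name follows the examples above):
+
+[source,js]
+--------------------------------------------------
+PUT music
+{
+  "settings" : {
+    "index.number_of_shards" : 1
+  }
+}
+--------------------------------------------------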
[[fuzzy]]
==== Fuzzy queries
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java
new file mode 100644
index 00000000000..bfc32311733
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DotExpanderProcessor.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.ingest.AbstractProcessor;
+import org.elasticsearch.ingest.ConfigurationUtils;
+import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
+
+import java.util.Map;
+
+public final class DotExpanderProcessor extends AbstractProcessor {
+
+ static final String TYPE = "dot_expander";
+
+ private final String path;
+ private final String field;
+
+ DotExpanderProcessor(String tag, String path, String field) {
+ super(tag);
+ this.path = path;
+ this.field = field;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public void execute(IngestDocument ingestDocument) throws Exception {
+ String path;
+ Map<String, Object> map;
+ if (this.path != null) {
+ path = this.path + "." + field;
+ map = ingestDocument.getFieldValue(this.path, Map.class);
+ } else {
+ path = field;
+ map = ingestDocument.getSourceAndMetadata();
+ }
+
+ if (ingestDocument.hasField(path)) {
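+ // the expanded path already exists: append the value, turning an existing scalar into an array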
+ Object value = map.remove(field);
+ ingestDocument.appendFieldValue(path, value);
+ } else {
+ // check whether we actually can expand the field in question into an object field.
+ // part of the path may already exist and if part of it would be a value field (string, integer etc.)
+ // then we can't override it with an object field and we should fail with a good reason.
+ // IngestDocument#setFieldValue(...) would fail too, but the error isn't very understandable
+ for (int index = path.indexOf('.'); index != -1; index = path.indexOf('.', index + 1)) {
+ String partialPath = path.substring(0, index);
+ if (ingestDocument.hasField(partialPath)) {
+ Object val = ingestDocument.getFieldValue(partialPath, Object.class);
+ if ((val instanceof Map) == false) {
+ throw new IllegalArgumentException("cannot expend [" + path + "], because [" + partialPath +
+ "] is not an object field, but a value field");
+ }
+ } else {
+ break;
+ }
+ }
+ Object value = map.remove(field);
+ ingestDocument.setFieldValue(path, value);
+ }
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ String getPath() {
+ return path;
+ }
+
+ String getField() {
+ return field;
+ }
+
+ public static final class Factory implements Processor.Factory {
+
+ @Override
+ public Processor create(Map<String, Processor.Factory> processorFactories, String tag,
+ Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
+ if (field.contains(".") == false) {
+ throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field",
+ "field does not contain a dot");
+ }
+ if (field.indexOf('.') == 0 || field.lastIndexOf('.') == field.length() - 1) {
+ throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field",
+ "Field can't start or end with a dot");
+ }
+ int firstIndex = -1;
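+ // consecutive dots (e.g. "a..b") produce two dot indexes that are one apart, i.e. an empty path segment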
+ for (int index = field.indexOf('.'); index != -1; index = field.indexOf('.', index + 1)) {
+ if (index - firstIndex == 1) {
+ throw ConfigurationUtils.newConfigurationException(ConfigurationUtils.TAG_KEY, tag, "field",
+ "No space between dots");
+ }
+ firstIndex = index;
+ }
+
+ String path = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "path");
+ return new DotExpanderProcessor(tag, path, field);
+ }
+ }
+}
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
index c89f6164de7..e6948771d8d 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
@@ -61,6 +61,7 @@ public class IngestCommonPlugin extends Plugin implements IngestPlugin {
processors.put(SortProcessor.TYPE, new SortProcessor.Factory());
processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(builtinPatterns));
processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService));
+ processors.put(DotExpanderProcessor.TYPE, new DotExpanderProcessor.Factory());
return Collections.unmodifiableMap(processors);
}
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorFactoryTests.java
new file mode 100644
index 00000000000..be0695924ef
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorFactoryTests.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class DotExpanderProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "_field.field");
+ config.put("path", "_path");
+ DotExpanderProcessor processor = (DotExpanderProcessor) factory.create(null, "_tag", config);
+ assertThat(processor.getField(), equalTo("_field.field"));
+ assertThat(processor.getPath(), equalTo("_path"));
+
+ config = new HashMap<>();
+ config.put("field", "_field.field");
+ processor = (DotExpanderProcessor) factory.create(null, "_tag", config);
+ assertThat(processor.getField(), equalTo("_field.field"));
+ assertThat(processor.getPath(), nullValue());
+ }
+
+ public void testValidFields() throws Exception {
+ DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+
+ String[] fields = new String[] {"a.b", "a.b.c", "a.b.c.d", "ab.cd"};
+ for (String field : fields) {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", field);
+ config.put("path", "_path");
+ DotExpanderProcessor processor = (DotExpanderProcessor) factory.create(null, "_tag", config);
+ assertThat(processor.getField(), equalTo(field));
+ assertThat(processor.getPath(), equalTo("_path"));
+ }
+ }
+
+ public void testCreate_fieldMissing() throws Exception {
+ DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+
+ Map<String, Object> config = new HashMap<>();
+ config.put("path", "_path");
+ Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+
+ public void testCreate_invalidFields() throws Exception {
+ DotExpanderProcessor.Factory factory = new DotExpanderProcessor.Factory();
+ String[] fields = new String[] {"a", "abc"};
+ for (String field : fields) {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", field);
+ Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
+ assertThat(e.getMessage(), equalTo("[field] field does not contain a dot"));
+ }
+
+ fields = new String[] {".a", "a.", "."};
+ for (String field : fields) {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", field);
+ Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
+ assertThat(e.getMessage(), equalTo("[field] Field can't start or end with a dot"));
+ }
+
+ fields = new String[] {"a..b", "a...b", "a.b..c", "abc.def..hij"};
+ for (String field : fields) {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", field);
+ Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", config));
+ assertThat(e.getMessage(), equalTo("[field] No space between dots"));
+ }
+ }
+
+}
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java
new file mode 100644
index 00000000000..1802090e0e5
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class DotExpanderProcessorTests extends ESTestCase {
+
+ public void testEscapeFields() throws Exception {
+ Map<String, Object> source = new HashMap<>();
+ source.put("foo.bar", "baz1");
+ IngestDocument document = new IngestDocument(source, Collections.emptyMap());
+ DotExpanderProcessor processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+ processor.execute(document);
+ assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar", String.class), equalTo("baz1"));
+
+ source = new HashMap<>();
+ source.put("foo.bar.baz", "value");
+ document = new IngestDocument(source, Collections.emptyMap());
+ processor = new DotExpanderProcessor("_tag", null, "foo.bar.baz");
+ processor.execute(document);
+ assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar.baz", String.class), equalTo("value"));
+
+ source = new HashMap<>();
+ source.put("foo.bar", "baz1");
+ source.put("foo", new HashMap<>(Collections.singletonMap("bar", "baz2")));
+ document = new IngestDocument(source, Collections.emptyMap());
+ processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+ processor.execute(document);
+ assertThat(document.getSourceAndMetadata().size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar", List.class).size(), equalTo(2));
+ assertThat(document.getFieldValue("foo.bar.0", String.class), equalTo("baz2"));
+ assertThat(document.getFieldValue("foo.bar.1", String.class), equalTo("baz1"));
+
+ source = new HashMap<>();
+ source.put("foo.bar", "2");
+ source.put("foo", new HashMap<>(Collections.singletonMap("bar", 1)));
+ document = new IngestDocument(source, Collections.emptyMap());
+ processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+ processor.execute(document);
+ assertThat(document.getSourceAndMetadata().size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar", List.class).size(), equalTo(2));
+ assertThat(document.getFieldValue("foo.bar.0", Integer.class), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar.1", String.class), equalTo("2"));
+ }
+
+ public void testEscapeFields_valueField() throws Exception {
+ Map<String, Object> source = new HashMap<>();
+ source.put("foo.bar", "baz1");
+ source.put("foo", "baz2");
+ IngestDocument document1 = new IngestDocument(source, Collections.emptyMap());
+ Processor processor1 = new DotExpanderProcessor("_tag", null, "foo.bar");
+ // foo already exists and is a leaf field and therefore can't be replaced by a map field:
+ Exception e = expectThrows(IllegalArgumentException.class, () -> processor1.execute(document1));
+ assertThat(e.getMessage(), equalTo("cannot expand [foo.bar], because [foo] is not an object field, but a value field"));
+
+ // because foo is not an object field but a value field, the `foo.bar` field can't be expanded
+ // into [foo].[bar], so foo must first be renamed to `foo.bar`:
+ IngestDocument document = new IngestDocument(source, Collections.emptyMap());
+ Processor processor = new RenameProcessor("_tag", "foo", "foo.bar");
+ processor.execute(document);
+ processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+ processor.execute(document);
+ assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar.0", String.class), equalTo("baz2"));
+ assertThat(document.getFieldValue("foo.bar.1", String.class), equalTo("baz1"));
+
+ source = new HashMap<>();
+ source.put("foo.bar", "baz1");
+ document = new IngestDocument(source, Collections.emptyMap());
+ processor = new DotExpanderProcessor("_tag", null, "foo.bar");
+ processor.execute(document);
+ assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar", String.class), equalTo("baz1"));
+
+ source = new HashMap<>();
+ source.put("foo.bar.baz", "baz1");
+ source.put("foo", new HashMap<>(Collections.singletonMap("bar", new HashMap<>())));
+ document = new IngestDocument(source, Collections.emptyMap());
+ processor = new DotExpanderProcessor("_tag", null, "foo.bar.baz");
+ processor.execute(document);
+ assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar.baz", String.class), equalTo("baz1"));
+
+ source = new HashMap<>();
+ source.put("foo.bar.baz", "baz1");
+ source.put("foo", new HashMap<>(Collections.singletonMap("bar", "baz2")));
+ IngestDocument document2 = new IngestDocument(source, Collections.emptyMap());
+ Processor processor2 = new DotExpanderProcessor("_tag", null, "foo.bar.baz");
+ e = expectThrows(IllegalArgumentException.class, () -> processor2.execute(document2));
+ assertThat(e.getMessage(), equalTo("cannot expand [foo.bar.baz], because [foo.bar] is not an object field, but a value field"));
+ }
+
+ public void testEscapeFields_path() throws Exception {
+ Map<String, Object> source = new HashMap<>();
+ source.put("foo", new HashMap<>(Collections.singletonMap("bar.baz", "value")));
+ IngestDocument document = new IngestDocument(source, Collections.emptyMap());
+ DotExpanderProcessor processor = new DotExpanderProcessor("_tag", "foo", "bar.baz");
+ processor.execute(document);
+ assertThat(document.getFieldValue("foo", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("foo.bar.baz", String.class), equalTo("value"));
+
+ source = new HashMap<>();
+ source.put("field", new HashMap<>(Collections.singletonMap("foo.bar.baz", "value")));
+ document = new IngestDocument(source, Collections.emptyMap());
+ processor = new DotExpanderProcessor("_tag", "field", "foo.bar.baz");
+ processor.execute(document);
+ assertThat(document.getFieldValue("field.foo", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("field.foo.bar", Map.class).size(), equalTo(1));
+ assertThat(document.getFieldValue("field.foo.bar.baz", String.class), equalTo("value"));
+ }
+
+}
diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml
index 14f58369dfa..e37b2d83183 100644
--- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml
+++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yaml
@@ -13,17 +13,18 @@
- match: { nodes.$master.ingest.processors.1.type: convert }
- match: { nodes.$master.ingest.processors.2.type: date }
- match: { nodes.$master.ingest.processors.3.type: date_index_name }
- - match: { nodes.$master.ingest.processors.4.type: fail }
- - match: { nodes.$master.ingest.processors.5.type: foreach }
- - match: { nodes.$master.ingest.processors.6.type: grok }
- - match: { nodes.$master.ingest.processors.7.type: gsub }
- - match: { nodes.$master.ingest.processors.8.type: join }
- - match: { nodes.$master.ingest.processors.9.type: lowercase }
- - match: { nodes.$master.ingest.processors.10.type: remove }
- - match: { nodes.$master.ingest.processors.11.type: rename }
- - match: { nodes.$master.ingest.processors.12.type: script }
- - match: { nodes.$master.ingest.processors.13.type: set }
- - match: { nodes.$master.ingest.processors.14.type: sort }
- - match: { nodes.$master.ingest.processors.15.type: split }
- - match: { nodes.$master.ingest.processors.16.type: trim }
- - match: { nodes.$master.ingest.processors.17.type: uppercase }
+ - match: { nodes.$master.ingest.processors.4.type: dot_expander }
+ - match: { nodes.$master.ingest.processors.5.type: fail }
+ - match: { nodes.$master.ingest.processors.6.type: foreach }
+ - match: { nodes.$master.ingest.processors.7.type: grok }
+ - match: { nodes.$master.ingest.processors.8.type: gsub }
+ - match: { nodes.$master.ingest.processors.9.type: join }
+ - match: { nodes.$master.ingest.processors.10.type: lowercase }
+ - match: { nodes.$master.ingest.processors.11.type: remove }
+ - match: { nodes.$master.ingest.processors.12.type: rename }
+ - match: { nodes.$master.ingest.processors.13.type: script }
+ - match: { nodes.$master.ingest.processors.14.type: set }
+ - match: { nodes.$master.ingest.processors.15.type: sort }
+ - match: { nodes.$master.ingest.processors.16.type: split }
+ - match: { nodes.$master.ingest.processors.17.type: trim }
+ - match: { nodes.$master.ingest.processors.18.type: uppercase }
diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/130_escape_dot.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/130_escape_dot.yaml
new file mode 100644
index 00000000000..1d537ffa6b7
--- /dev/null
+++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/130_escape_dot.yaml
@@ -0,0 +1,40 @@
+---
+teardown:
+ - do:
+ ingest.delete_pipeline:
+ id: "1"
+ ignore: 404
+
+---
+"Test escape_dot processor":
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "dot_expander" : {
+ "field" : "foo.bar"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ type: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo.bar: "baz"
+ }
+
+ - do:
+ get:
+ index: test
+ type: test
+ id: 1
+ - match: { _source.foo.bar: "baz" }
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java
index b0fc9b428ba..1a262a32d3d 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java
@@ -206,14 +206,6 @@ public class RoundTripTests extends ESTestCase {
emptyMap()); // Params
}
- private long randomPositiveLong() {
- long l;
- do {
- l = randomLong();
- } while (l < 0);
- return l;
- }
-
private void assertResponseEquals(BulkIndexByScrollResponse expected, BulkIndexByScrollResponse actual) {
assertEquals(expected.getTook(), actual.getTook());
assertTaskStatusEquals(expected.getStatus(), actual.getStatus());
diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle
index 506215708e2..9ad2f8d02e2 100644
--- a/plugins/discovery-ec2/build.gradle
+++ b/plugins/discovery-ec2/build.gradle
@@ -42,6 +42,12 @@ dependencyLicenses {
mapping from: /jackson-.*/, to: 'jackson'
}
+bundlePlugin {
+ from('config/discovery-ec2') {
+ into 'config'
+ }
+}
+
test {
// this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name
diff --git a/plugins/discovery-ec2/config/discovery-ec2/log4j2.properties b/plugins/discovery-ec2/config/discovery-ec2/log4j2.properties
new file mode 100644
index 00000000000..aa52f0232e0
--- /dev/null
+++ b/plugins/discovery-ec2/config/discovery-ec2/log4j2.properties
@@ -0,0 +1,8 @@
+logger.com_amazonaws.name = com.amazonaws
+logger.com_amazonaws.level = warn
+
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.name = com.amazonaws.jmx.SdkMBeanRegistrySupport
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.level = error
+
+logger.com_amazonaws_metrics_AwsSdkMetrics.name = com.amazonaws.metrics.AwsSdkMetrics
+logger.com_amazonaws_metrics_AwsSdkMetrics.level = error
diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java
index 634a4ca6dfa..c3614952ecf 100644
--- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java
+++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java
@@ -41,7 +41,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
final Object compiled = se.compile(null, "x + y", Collections.emptyMap());
final AtomicBoolean failed = new AtomicBoolean();
- Thread[] threads = new Thread[50];
+ Thread[] threads = new Thread[between(3, 12)];
final CountDownLatch latch = new CountDownLatch(threads.length);
final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
for (int i = 0; i < threads.length; i++) {
@@ -57,7 +57,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
vars.put("x", x);
vars.put("y", y);
ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), vars);
- for (int i = 0; i < 100000; i++) {
+ for (int i = 0; i < between(100, 1000); i++) {
long result = ((Number) script.run()).longValue();
assertThat(result, equalTo(addition));
}
@@ -83,7 +83,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
final Object compiled = se.compile(null, "x + y", Collections.emptyMap());
final AtomicBoolean failed = new AtomicBoolean();
- Thread[] threads = new Thread[50];
+ Thread[] threads = new Thread[between(3, 12)];
final CountDownLatch latch = new CountDownLatch(threads.length);
final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
for (int i = 0; i < threads.length; i++) {
@@ -96,7 +96,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
Map<String, Object> vars = new HashMap<>();
vars.put("x", x);
ExecutableScript script = se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), vars);
- for (int i = 0; i < 100000; i++) {
+ for (int i = 0; i < between(100, 1000); i++) {
long y = Randomness.get().nextInt();
long addition = x + y;
script.setNextVar("y", y);
@@ -125,7 +125,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
final Object compiled = se.compile(null, "x + y", Collections.emptyMap());
final AtomicBoolean failed = new AtomicBoolean();
- Thread[] threads = new Thread[50];
+ Thread[] threads = new Thread[between(3, 12)];
final CountDownLatch latch = new CountDownLatch(threads.length);
final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
for (int i = 0; i < threads.length; i++) {
@@ -135,7 +135,7 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
try {
barrier.await();
Map<String, Object> runtimeVars = new HashMap<>();
- for (int i = 0; i < 100000; i++) {
+ for (int i = 0; i < between(100, 1000); i++) {
long x = Randomness.get().nextInt();
long y = Randomness.get().nextInt();
long addition = x + y;
diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle
index a6610178ce8..b1369908670 100644
--- a/plugins/repository-s3/build.gradle
+++ b/plugins/repository-s3/build.gradle
@@ -48,6 +48,12 @@ dependencyLicenses {
mapping from: /jaxb-.*/, to: 'jaxb'
}
+bundlePlugin {
+ from('config/repository-s3') {
+ into 'config'
+ }
+}
+
test {
// this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name
diff --git a/plugins/repository-s3/config/repository-s3/log4j2.properties b/plugins/repository-s3/config/repository-s3/log4j2.properties
new file mode 100644
index 00000000000..3fee57ce3e2
--- /dev/null
+++ b/plugins/repository-s3/config/repository-s3/log4j2.properties
@@ -0,0 +1,8 @@
+logger.com_amazonaws.name = com.amazonaws
+logger.com_amazonaws.level = warn
+
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.name = com.amazonaws.jmx.SdkMBeanRegistrySupport
+logger.com_amazonaws_jmx_SdkMBeanRegistrySupport.level = error
+
+logger.com_amazonaws_metrics_AwsSdkMetrics.name = com.amazonaws.metrics.AwsSdkMetrics
+logger.com_amazonaws_metrics_AwsSdkMetrics.level = error
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
index a14ee2282c4..168453df763 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
@@ -21,15 +21,16 @@ package org.elasticsearch.common.logging;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
+import org.elasticsearch.Version;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.hamcrest.RegexMatcher;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -66,21 +67,22 @@ public class EvilLoggerTests extends ESTestCase {
testLogger.trace("This is a trace message");
final String path = System.getProperty("es.logs") + ".log";
final List<String> events = Files.readAllLines(PathUtils.get(path));
- assertThat(events.size(), equalTo(5));
+ assertThat(events.size(), equalTo(6)); // the five messages we log plus a warning for unsupported configuration files
final String location = "org.elasticsearch.common.logging.EvilLoggerTests.testLocationInfoTest";
- assertLogLine(events.get(0), Level.ERROR, location, "This is an error message");
- assertLogLine(events.get(1), Level.WARN, location, "This is a warning message");
- assertLogLine(events.get(2), Level.INFO, location, "This is an info message");
- assertLogLine(events.get(3), Level.DEBUG, location, "This is a debug message");
- assertLogLine(events.get(4), Level.TRACE, location, "This is a trace message");
+ // the first message is a warning for unsupported configuration files
+ assertLogLine(events.get(1), Level.ERROR, location, "This is an error message");
+ assertLogLine(events.get(2), Level.WARN, location, "This is a warning message");
+ assertLogLine(events.get(3), Level.INFO, location, "This is an info message");
+ assertLogLine(events.get(4), Level.DEBUG, location, "This is a debug message");
+ assertLogLine(events.get(5), Level.TRACE, location, "This is a trace message");
}
private void assertLogLine(final String logLine, final Level level, final String location, final String message) {
final Matcher matcher = Pattern.compile("\\[(.*)\\]\\[(.*)\\(.*\\)\\] (.*)").matcher(logLine);
assertTrue(logLine, matcher.matches());
assertThat(matcher.group(1), equalTo(level.toString()));
- assertThat(matcher.group(2), equalTo(location));
- assertThat(matcher.group(3), equalTo(message));
+ assertThat(matcher.group(2), RegexMatcher.matches(location));
+ assertThat(matcher.group(3), RegexMatcher.matches(message));
}
public void testDeprecationLogger() throws IOException {
@@ -95,4 +97,17 @@ public class EvilLoggerTests extends ESTestCase {
"This is a deprecation message");
}
+ public void testUnsupportedLoggingConfigurationFiles() throws IOException {
+ // TODO: the warning for unsupported logging configurations can be removed in 6.0.0
+ assert Version.CURRENT.major < 6;
+ final String path = System.getProperty("es.logs") + ".log";
+ final List<String> events = Files.readAllLines(PathUtils.get(path));
+ assertThat(events.size(), equalTo(1));
+ assertLogLine(
+ events.get(0),
+ Level.WARN,
+ "org\\.elasticsearch\\.common\\.logging\\.LogConfigurator.*",
+ "ignoring unsupported logging configuration file \\[.*\\], logging is configured via \\[.*\\]");
+ }
+
}
diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/logging.yml b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/logging.yml
new file mode 100644
index 00000000000..5aa98f454f3
--- /dev/null
+++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/logging.yml
@@ -0,0 +1,2 @@
+logger.level: INFO
+rootLogger: ${logger.level}, terminal
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index b0575d74817..0e411a6efc8 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -302,6 +302,14 @@ public abstract class ESTestCase extends LuceneTestCase {
return random().nextInt();
}
+ public static long randomPositiveLong() {
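+ // Math.abs(Long.MIN_VALUE) overflows back to Long.MIN_VALUE, so redraw until any other value appears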
+ long randomLong;
+ do {
+ randomLong = randomLong();
+ } while (randomLong == Long.MIN_VALUE);
+ return Math.abs(randomLong);
+ }
+
public static float randomFloat() {
return random().nextFloat();
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java
index e93f355d64c..fe46251e3ee 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java
@@ -61,7 +61,7 @@ import static org.junit.Assert.assertEquals;
public class OldIndexUtils {
- public static List<String> loadIndexesList(String prefix, Path bwcIndicesPath) throws IOException {
+ public static List<String> loadDataFilesList(String prefix, Path bwcIndicesPath) throws IOException {
List<String> indexes = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(bwcIndicesPath, prefix + "-*.zip")) {
for (Path path : stream) {