Merge remote-tracking branch 'es/7.x' into enrich-7.x
commit 44f09a9a86

@@ -161,13 +161,17 @@ def deb_common(config, name, extra: '')
     s.privileged = false
     s.inline = "sudo sed -i '/tty/!s/mesg n/tty -s \\&\\& mesg n/' /root/.profile"
   end
+  extra_with_lintian = <<-SHELL
+    install lintian
+    #{extra}
+  SHELL
   linux_common(
     config,
     name,
     update_command: 'apt-get update',
     update_tracking_file: '/var/cache/apt/archives/last_update',
     install_command: 'apt-get install -y',
-    extra: extra
+    extra: extra_with_lintian
   )
 end

@@ -34,6 +34,9 @@ import org.gradle.api.plugins.quality.Checkstyle
 class PrecommitTasks {

     /** Adds a precommit task, which depends on non-test verification tasks. */
+
+    public static final String CHECKSTYLE_VERSION = '8.20'
+
     public static Task create(Project project, boolean includeDependencyLicenses) {
         project.configurations.create("forbiddenApisCliJar")
         project.dependencies {

@@ -214,7 +217,7 @@ class PrecommitTasks {
            configProperties = [
                suppressions: checkstyleSuppressions
            ]
-           toolVersion = '8.10.1'
+           toolVersion = CHECKSTYLE_VERSION
        }

        project.tasks.withType(Checkstyle) { task ->

@@ -1 +1 @@
-5.3
+5.4.1

@@ -270,13 +270,13 @@ apply plugin: 'nebula.ospackage-base'
 // this is package indepdendent configuration
 ospackage {
   maintainer 'Elasticsearch Team <info@elastic.co>'
-  summary '''
-    Elasticsearch is a distributed RESTful search engine built for the cloud.
+  summary 'Distributed RESTful search engine built for the cloud'
+  packageDescription '''
     Reference documentation can be found at
     https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html
     and the 'Elasticsearch: The Definitive Guide' book can be found at
     https://www.elastic.co/guide/en/elasticsearch/guide/current/index.html
-  '''.stripIndent().replace('\n', ' ').trim()
+  '''.stripIndent().trim()
   url 'https://www.elastic.co/'

   // signing setup

@@ -288,7 +288,8 @@ ospackage {
       new File(new File(System.getProperty('user.home'), '.gnupg'), 'secring.gpg')
   }

-  requires('coreutils')
+  // version found on oldest supported distro, centos-6
+  requires('coreutils', '8.4', GREATER | EQUAL)

   fileMode 0644
   dirMode 0755

@@ -312,12 +313,18 @@ Closure commonDebConfig(boolean oss, boolean jdk) {

     version = project.version.replace('-', '~')
     packageGroup 'web'
-    requires 'bash'
+
+    // versions found on oldest supported distro, centos-6
+    requires('bash', '4.1', GREATER | EQUAL)
+    requires('lsb-base', '4', GREATER | EQUAL)
     requires 'libc6'
     requires 'adduser'

     into('/usr/share/lintian/overrides') {
       from('src/deb/lintian/elasticsearch')
+      if (oss) {
+        rename('elasticsearch', 'elasticsearch-oss')
+      }
     }
   }
 }

@@ -1,8 +1,48 @@
-# Ignore arch dependent warnings, we chose the right libs on start
-elasticsearch binary: arch-independent-package-contains-binary-or-object
-# Not stripping external libraries
-elasticsearch binary: unstripped-binary-or-object
-# Ignore arch dependent warnings, we chose the right libs on start
-elasticsearch binary: arch-dependent-file-in-usr-share
-# Please check our changelog at http://www.elastic.co/downloads/elasticsearch
-elasticsearch binary: changelog-file-missing-in-native-package
+# we don't have a changelog, but we put our copyright file
+# under /usr/share/doc/elasticsearch, which triggers this warning
+changelog-file-missing-in-native-package
+
+# we intentionally copy our copyright file for all deb packages
+copyright-file-contains-full-apache-2-license
+copyright-should-refer-to-common-license-file-for-apache-2
+copyright-without-copyright-notice
+
+# we still put all our files under /usr/share/elasticsearch even after transition to platform dependent packages
+arch-dependent-file-in-usr-share
+
+# we have a bundled jdk, so don't use jarwrapper
+missing-dep-on-jarwrapper
+
+# we prefer to not make our config and log files world readable
+non-standard-file-perm etc/default/elasticsearch 0660 != 0644
+non-standard-dir-perm etc/elasticsearch/ 2750 != 0755
+non-standard-file-perm etc/elasticsearch/*
+non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755
+non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755
+executable-is-not-world-readable etc/init.d/elasticsearch 0750
+non-standard-file-permissions-for-etc-init.d-script etc/init.d/elasticsearch 0750 != 0755
+
+# this lintian tag is simply wrong; contrary to the explanation, debian systemd
+# does actually look at /usr/lib/systemd/system
+systemd-service-file-outside-lib usr/lib/systemd/system/elasticsearch.service
+
+# we do not automatically enable the service in init.d or systemd
+script-in-etc-init.d-not-registered-via-update-rc.d etc/init.d/elasticsearch
+
+# the package scripts handle init.d/systemd directly and don't need to use deb helpers
+maintainer-script-calls-systemctl
+prerm-calls-updaterc.d elasticsearch
+
+# bundled JDK
+embedded-library
+arch-dependent-file-in-usr-share usr/share/elasticsearch/jdk/*
+unstripped-binary-or-object usr/share/elasticsearch/jdk/*
+extra-license-file usr/share/elasticsearch/jdk/legal/*
+hardening-no-pie usr/share/elasticsearch/jdk/bin/*
+hardening-no-pie usr/share/elasticsearch/jdk/lib/*
+
+# the system java version that lintian assumes is far behind what elasticsearch uses
+unknown-java-class-version
+
+# elastic licensed modules contain elastic license
+extra-license-file usr/share/elasticsearch/modules/*

@@ -18,7 +18,7 @@ source "`dirname "$0"`"/elasticsearch-env

 ES_JVM_OPTIONS="$ES_PATH_CONF"/jvm.options
 JVM_OPTIONS=`"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.JvmOptionsParser "$ES_JVM_OPTIONS"`
-ES_JAVA_OPTS="${JVM_OPTIONS//\$\{ES_TMPDIR\}/$ES_TMPDIR} $ES_JAVA_OPTS"
+ES_JAVA_OPTS="${JVM_OPTIONS//\$\{ES_TMPDIR\}/$ES_TMPDIR}"

 # manual parsing to find out, if process should be detached
 if ! echo $* | grep -E '(^-d |-d$| -d |--daemonize$|--daemonize )' > /dev/null; then

@@ -112,7 +112,7 @@ if not "%ES_JAVA_OPTS%" == "" set ES_JAVA_OPTS=%ES_JAVA_OPTS: =;%

 @setlocal
 for /F "usebackq delims=" %%a in (`"%JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.JvmOptionsParser" "!ES_JVM_OPTIONS!" || echo jvm_options_parser_failed"`) do set JVM_OPTIONS=%%a
-@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS%
+@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!%

 if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" (
   exit /b 1

@@ -44,7 +44,7 @@ IF ERRORLEVEL 1 (
 set ES_JVM_OPTIONS=%ES_PATH_CONF%\jvm.options
 @setlocal
 for /F "usebackq delims=" %%a in (`CALL %JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.JvmOptionsParser" "!ES_JVM_OPTIONS!" ^|^| echo jvm_options_parser_failed`) do set JVM_OPTIONS=%%a
-@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS%
+@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!%

 if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" (
   exit /b 1

@@ -19,24 +19,28 @@

 package org.elasticsearch.tools.launchers;

+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Optional;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;

 /**
  * Tunes Elasticsearch JVM settings based on inspection of provided JVM options.
  */
 final class JvmErgonomics {
-    private static final long KB = 1024L;
-
-    private static final long MB = 1024L * 1024L;
-
-    private static final long GB = 1024L * 1024L * 1024L;

     private JvmErgonomics() {
         throw new AssertionError("No instances intended");

@@ -48,48 +52,74 @@ final class JvmErgonomics {
     * @param userDefinedJvmOptions A list of JVM options that have been defined by the user.
     * @return A list of additional JVM options to set.
     */
-    static List<String> choose(List<String> userDefinedJvmOptions) {
-        List<String> ergonomicChoices = new ArrayList<>();
-        Long heapSize = extractHeapSize(userDefinedJvmOptions);
-        Map<String, String> systemProperties = extractSystemProperties(userDefinedJvmOptions);
-        if (heapSize != null) {
-            if (systemProperties.containsKey("io.netty.allocator.type") == false) {
-                if (heapSize <= 1 * GB) {
-                    ergonomicChoices.add("-Dio.netty.allocator.type=unpooled");
-                } else {
-                    ergonomicChoices.add("-Dio.netty.allocator.type=pooled");
-                }
-            }
-        }
+    static List<String> choose(final List<String> userDefinedJvmOptions) throws InterruptedException, IOException {
+        final List<String> ergonomicChoices = new ArrayList<>();
+        final Map<String, Optional<String>> finalJvmOptions = finalJvmOptions(userDefinedJvmOptions);
+        final long heapSize = extractHeapSize(finalJvmOptions);
+        final Map<String, String> systemProperties = extractSystemProperties(userDefinedJvmOptions);
+        if (systemProperties.containsKey("io.netty.allocator.type") == false) {
+            if (heapSize <= 1 << 30) {
+                ergonomicChoices.add("-Dio.netty.allocator.type=unpooled");
+            } else {
+                ergonomicChoices.add("-Dio.netty.allocator.type=pooled");
+            }
+        }
         return ergonomicChoices;
     }

-    private static final Pattern MAX_HEAP_SIZE = Pattern.compile("^(-Xmx|-XX:MaxHeapSize=)(?<size>\\d+)(?<unit>\\w)?$");
+    private static final Pattern OPTION =
+        Pattern.compile("^\\s*\\S+\\s+(?<flag>\\S+)\\s+:?=\\s+(?<value>\\S+)?\\s+\\{[^}]+?\\}\\s+\\{[^}]+}");
+
+    static Map<String, Optional<String>> finalJvmOptions(
+            final List<String> userDefinedJvmOptions) throws InterruptedException, IOException {
+        return Collections.unmodifiableMap(flagsFinal(userDefinedJvmOptions).stream()
+            .map(OPTION::matcher).filter(Matcher::matches)
+            .collect(Collectors.toMap(m -> m.group("flag"), m -> Optional.ofNullable(m.group("value")))));
+    }
+
+    private static List<String> flagsFinal(final List<String> userDefinedJvmOptions) throws InterruptedException, IOException {
+        /*
+         * To deduce the final set of JVM options that Elasticsearch is going to start with, we start a separate Java process with the JVM
+         * options that we would pass on the command line. For this Java process we will add two additional flags, -XX:+PrintFlagsFinal and
+         * -version. This causes the Java process that we start to parse the JVM options into their final values, display them on standard
+         * output, print the version to standard error, and then exit. The JVM itself never bootstraps, and therefore this process is
+         * lightweight. By doing this, we get the JVM options parsed exactly as the JVM that we are going to execute would parse them
+         * without having to implement our own JVM option parsing logic.
+         */
+        final String java = Paths.get(System.getProperty("java.home"), "bin", "java").toString();
+        final List<String> command =
+            Collections.unmodifiableList(
+                Stream.of(Stream.of(java), userDefinedJvmOptions.stream(), Stream.of("-XX:+PrintFlagsFinal"), Stream.of("-version"))
+                    .reduce(Stream::concat)
+                    .get()
+                    .collect(Collectors.toList()));
+        final Process process = new ProcessBuilder().command(command).start();
+        final List<String> output = readLinesFromInputStream(process.getInputStream());
+        final List<String> error = readLinesFromInputStream(process.getErrorStream());
+        final int status = process.waitFor();
+        if (status != 0) {
+            final String message = String.format(
+                Locale.ROOT,
+                "starting java failed with [%d]\noutput:\n%s\nerror:\n%s",
+                status,
+                String.join("\n", output),
+                String.join("\n", error));
+            throw new RuntimeException(message);
+        } else {
+            return output;
+        }
+    }
+
+    private static List<String> readLinesFromInputStream(final InputStream is) throws IOException {
+        try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8);
+             BufferedReader br = new BufferedReader(isr)) {
+            return Collections.unmodifiableList(br.lines().collect(Collectors.toList()));
+        }
+    }

     // package private for testing
-    static Long extractHeapSize(List<String> userDefinedJvmOptions) {
-        for (String jvmOption : userDefinedJvmOptions) {
-            final Matcher matcher = MAX_HEAP_SIZE.matcher(jvmOption);
-            if (matcher.matches()) {
-                final long size = Long.parseLong(matcher.group("size"));
-                final String unit = matcher.group("unit");
-                if (unit == null) {
-                    return size;
-                } else {
-                    switch (unit.toLowerCase(Locale.ROOT)) {
-                        case "k":
-                            return size * KB;
-                        case "m":
-                            return size * MB;
-                        case "g":
-                            return size * GB;
-                        default:
-                            throw new IllegalArgumentException("Unknown unit [" + unit + "] for max heap size in [" + jvmOption + "]");
-                    }
-                }
-            }
-        }
-        return null;
-    }
+    static Long extractHeapSize(final Map<String, Optional<String>> finalJvmOptions) {
+        return Long.parseLong(finalJvmOptions.get("MaxHeapSize").get());
+    }

     private static final Pattern SYSTEM_PROPERTY = Pattern.compile("^-D(?<key>[\\w+].*?)=(?<value>.*)$");

@@ -105,4 +135,5 @@ final class JvmErgonomics {
         }
         return systemProperties;
     }
+
 }

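The finalJvmOptions/flagsFinal additions above resolve the effective JVM flags by launching a short-lived `java ... -XX:+PrintFlagsFinal -version` process and matching each line of its standard output against the OPTION pattern. The snippet below is a minimal standalone sketch of just that parsing step; the sample output line is hypothetical, and real -XX:+PrintFlagsFinal output varies with the JVM version and the options passed.

import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Standalone sketch of the -XX:+PrintFlagsFinal parsing used by finalJvmOptions above.
// The sample line is hypothetical; real output depends on the JVM and its options.
public class PrintFlagsFinalParseSketch {

    // Same pattern as the OPTION constant introduced in the diff.
    private static final Pattern OPTION =
        Pattern.compile("^\\s*\\S+\\s+(?<flag>\\S+)\\s+:?=\\s+(?<value>\\S+)?\\s+\\{[^}]+?\\}\\s+\\{[^}]+}");

    public static void main(String[] args) {
        String line = "   size_t MaxHeapSize   = 2147483648   {product} {ergonomic}";
        Matcher m = OPTION.matcher(line);
        if (m.matches()) {
            String flag = m.group("flag");                                   // "MaxHeapSize"
            Optional<String> value = Optional.ofNullable(m.group("value"));  // "2147483648"
            // extractHeapSize above simply calls Long.parseLong on this value.
            System.out.println(flag + " = " + value.orElse("<unset>"));
        }
    }
}

With -Xmx2g, for example, the resolved MaxHeapSize comes back as 2147483648 bytes, which choose() compares against the 1 GiB threshold written as 1 << 30.
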
@@ -19,12 +19,14 @@

 package org.elasticsearch.tools.launchers;

+import org.elasticsearch.tools.java_version_checker.JavaVersion;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;

@@ -37,8 +39,7 @@ import java.util.SortedMap;
 import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-
-import org.elasticsearch.tools.java_version_checker.JavaVersion;
+import java.util.stream.Collectors;

 /**
  * Parses JVM options from a file and prints a single line with all JVM options to standard output.

@@ -51,14 +52,14 @@ final class JvmOptionsParser {
     *
     * @param args the args to the program which should consist of a single option, the path to the JVM options
     */
-    public static void main(final String[] args) throws IOException {
+    public static void main(final String[] args) throws InterruptedException, IOException {
        if (args.length != 1) {
            throw new IllegalArgumentException("expected one argument specifying path to jvm.options but was " + Arrays.toString(args));
        }
        final List<String> jvmOptions = new ArrayList<>();
        final SortedMap<Integer, String> invalidLines = new TreeMap<>();
        try (InputStream is = Files.newInputStream(Paths.get(args[0]));
-            Reader reader = new InputStreamReader(is, Charset.forName("UTF-8"));
+            Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8);
             BufferedReader br = new BufferedReader(reader)) {
            parse(
                JavaVersion.majorVersion(JavaVersion.CURRENT),

@@ -78,7 +79,14 @@ final class JvmOptionsParser {
        }

        if (invalidLines.isEmpty()) {
-            List<String> ergonomicJvmOptions = JvmErgonomics.choose(jvmOptions);
+            // now append the JVM options from ES_JAVA_OPTS
+            final String environmentJvmOptions = System.getenv("ES_JAVA_OPTS");
+            if (environmentJvmOptions != null) {
+                jvmOptions.addAll(Arrays.stream(environmentJvmOptions.split("\\s+"))
+                    .filter(s -> s.trim().isEmpty() == false)
+                    .collect(Collectors.toList()));
+            }
+            final List<String> ergonomicJvmOptions = JvmErgonomics.choose(jvmOptions);
            jvmOptions.addAll(ergonomicJvmOptions);
            final String spaceDelimitedJvmOptions = spaceDelimitJvmOptions(jvmOptions);
            Launchers.outPrintln(spaceDelimitedJvmOptions);

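Together with the earlier bin/elasticsearch and Windows script hunks that stop appending $ES_JAVA_OPTS / %ES_JAVA_OPTS% themselves, this change moves ES_JAVA_OPTS handling into the options parser: main() splits the environment variable on whitespace, drops empty tokens, and appends the result to jvmOptions before JvmErgonomics.choose runs, so the ergonomic choices also see options supplied through the environment. Below is a small sketch of just the splitting step, using a hypothetical ES_JAVA_OPTS value.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Sketch of the ES_JAVA_OPTS splitting that main() now performs before calling
// JvmErgonomics.choose. The input string is hypothetical.
public class EsJavaOptsSplitSketch {
    public static void main(String[] args) {
        String environmentJvmOptions = "-Xms4g  -Xmx4g -Dio.netty.allocator.type=pooled";
        List<String> extra = Arrays.stream(environmentJvmOptions.split("\\s+"))
            .filter(s -> s.trim().isEmpty() == false)   // drop empty tokens from repeated spaces
            .collect(Collectors.toList());
        // prints [-Xms4g, -Xmx4g, -Dio.netty.allocator.type=pooled]
        System.out.println(extra);
    }
}
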
@@ -19,38 +19,70 @@

 package org.elasticsearch.tools.launchers;

+import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.hasToString;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

 public class JvmErgonomicsTests extends LaunchersTestCase {
-    public void testExtractValidHeapSize() {
-        assertEquals(Long.valueOf(1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx1024")));
-        assertEquals(Long.valueOf(2L * 1024 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2g")));
-        assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx32M")));
-        assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-XX:MaxHeapSize=32M")));
+
+    public void testExtractValidHeapSizeUsingXmx() throws InterruptedException, IOException {
+        assertThat(
+            JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx2g"))),
+            equalTo(2L << 30));
     }

-    public void testExtractInvalidHeapSize() {
+    public void testExtractValidHeapSizeUsingMaxHeapSize() throws InterruptedException, IOException {
+        assertThat(
+            JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-XX:MaxHeapSize=2g"))),
+            equalTo(2L << 30));
+    }
+
+    public void testExtractValidHeapSizeNoOptionPresent() throws InterruptedException, IOException {
+        assertThat(
+            JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.emptyList())),
+            greaterThan(0L));
+    }
+
+    public void testHeapSizeInvalid() throws InterruptedException, IOException {
         try {
-            JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2T"));
-            fail("Expected IllegalArgumentException to be raised");
-        } catch (IllegalArgumentException expected) {
-            assertEquals("Unknown unit [T] for max heap size in [-Xmx2T]", expected.getMessage());
+            JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx2Z")));
+            fail("expected starting java to fail");
+        } catch (final RuntimeException e) {
+            assertThat(e, hasToString(containsString(("starting java failed"))));
+            assertThat(e, hasToString(containsString(("Invalid maximum heap size: -Xmx2Z"))));
         }
     }

-    public void testExtractNoHeapSize() {
-        assertNull("No spaces allowed", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx 1024")));
-        assertNull("JVM option is not present", JvmErgonomics.extractHeapSize(Collections.singletonList("")));
-        assertNull("Multiple JVM options per line", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xms2g -Xmx2g")));
+    public void testHeapSizeTooSmall() throws InterruptedException, IOException {
+        try {
+            JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx1024")));
+            fail("expected starting java to fail");
+        } catch (final RuntimeException e) {
+            assertThat(e, hasToString(containsString(("starting java failed"))));
+            assertThat(e, hasToString(containsString(("Too small maximum heap"))));
+        }
+    }
+
+    public void testHeapSizeWithSpace() throws InterruptedException, IOException {
+        try {
+            JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx 1024")));
+            fail("expected starting java to fail");
+        } catch (final RuntimeException e) {
+            assertThat(e, hasToString(containsString(("starting java failed"))));
+            assertThat(e, hasToString(containsString(("Invalid maximum heap size: -Xmx 1024"))));
+        }
     }

     public void testExtractSystemProperties() {

@@ -69,15 +101,16 @@ public class JvmErgonomicsTests extends LaunchersTestCase {
        assertTrue(parsedSystemProperties.isEmpty());
    }

-    public void testLittleMemoryErgonomicChoices() {
+    public void testLittleMemoryErgonomicChoices() throws InterruptedException, IOException {
        String smallHeap = randomFrom(Arrays.asList("64M", "512M", "1024M", "1G"));
        List<String> expectedChoices = Collections.singletonList("-Dio.netty.allocator.type=unpooled");
        assertEquals(expectedChoices, JvmErgonomics.choose(Arrays.asList("-Xms" + smallHeap, "-Xmx" + smallHeap)));
    }

-    public void testPlentyMemoryErgonomicChoices() {
+    public void testPlentyMemoryErgonomicChoices() throws InterruptedException, IOException {
        String largeHeap = randomFrom(Arrays.asList("1025M", "2048M", "2G", "8G"));
        List<String> expectedChoices = Collections.singletonList("-Dio.netty.allocator.type=pooled");
        assertEquals(expectedChoices, JvmErgonomics.choose(Arrays.asList("-Xms" + largeHeap, "-Xmx" + largeHeap)));
    }

 }

@@ -84,6 +84,10 @@ executing.

 The below list shows the actions which are available in each phase.

+NOTE: The order that configured actions are performed in within each phase is
+determined by automatically by {ilm-init}, and cannot be changed by changing the
+policy definition.
+
 * Hot
   - <<ilm-set-priority-action,Set Priority>>
   - <<ilm-rollover-action,Rollover>>

@@ -27,7 +27,7 @@ uncompressed. The `ingest-geoip` config directory is located at `$ES_HOME/config
 | `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document
 |======

-*Depends on what is available in `database_field`:
+*Depends on what is available in `database_file`:

 * If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`,
 `country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`

@@ -1,3 +1,3 @@
 org.gradle.daemon=true
-org.gradle.jvmargs=-Xmx2g -XX:+HeapDumpOnOutOfMemoryError
+org.gradle.jvmargs=-Xmx2g -XX:+HeapDumpOnOutOfMemoryError -Xss2m
 options.forkOptions.memoryMaximumSize=2g

@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.3-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionSha256Sum=f4d820c2a9685710eba5b92f10e0e4fb20e0d6c0dd1f46971e658160f25e7147
+distributionSha256Sum=14cd15fc8cc8705bd69dcfa3c8fefb27eb7027f5de4b47a8b279218f76895a91

@@ -56,6 +56,7 @@ import static org.elasticsearch.packaging.util.Packages.startElasticsearch;
 import static org.elasticsearch.packaging.util.Packages.stopElasticsearch;
 import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
 import static org.elasticsearch.packaging.util.Platforms.getOsRelease;
+import static org.elasticsearch.packaging.util.Platforms.isDPKG;
 import static org.elasticsearch.packaging.util.Platforms.isSystemd;
 import static org.elasticsearch.packaging.util.ServerUtils.makeRequest;
 import static org.elasticsearch.packaging.util.ServerUtils.runElasticsearchTests;

@@ -78,6 +79,11 @@ public abstract class PackageTestCase extends PackagingTestCase {
        sh = newShell();
    }

+    public void test05CheckLintian() throws Exception {
+        assumeTrue(isDPKG());
+        sh.run("lintian --fail-on-warnings " + FileUtils.getDistributionFile(distribution()));
+    }
+
    public void test10InstallPackage() throws Exception {
        assertRemoved(distribution());
        installation = install(distribution());

@@ -157,11 +157,11 @@ public class ReadOnlyEngine extends Engine {

    protected final DirectoryReader wrapReader(DirectoryReader reader,
                                               Function<DirectoryReader, DirectoryReader> readerWrapperFunction) throws IOException {
-        reader = ElasticsearchDirectoryReader.wrap(reader, engineConfig.getShardId());
        if (engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
            reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD);
        }
-        return readerWrapperFunction.apply(reader);
+        reader = readerWrapperFunction.apply(reader);
+        return ElasticsearchDirectoryReader.wrap(reader, engineConfig.getShardId());
    }

    protected DirectoryReader open(IndexCommit commit) throws IOException {

@@ -1065,6 +1065,7 @@ public class CoordinatorTests extends ESTestCase {
        cluster1.stabilise();
    }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/41967")
    public void testDiscoveryUsesNodesFromLastClusterState() {
        final Cluster cluster = new Cluster(randomIntBetween(3, 5));
        cluster.runRandomly();

@@ -18,9 +18,12 @@
  */
 package org.elasticsearch.index.engine;

+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.seqno.SeqNoStats;

@@ -32,7 +35,9 @@ import java.util.List;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Function;

+import static org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader.getElasticsearchDirectoryReader;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;

 public class ReadOnlyEngineTests extends EngineTestCase {

@@ -80,6 +85,13 @@ public class ReadOnlyEngineTests extends EngineTestCase {
            Engine.Searcher external = readOnlyEngine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL);
            Engine.Searcher internal = readOnlyEngine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
            assertSame(external.reader(), internal.reader());
+            assertThat(external.reader(), instanceOf(DirectoryReader.class));
+            DirectoryReader dirReader = external.getDirectoryReader();
+            ElasticsearchDirectoryReader esReader = getElasticsearchDirectoryReader(dirReader);
+            IndexReader.CacheHelper helper = esReader.getReaderCacheHelper();
+            assertNotNull(helper);
+            assertEquals(helper.getKey(), dirReader.getReaderCacheHelper().getKey());
+
            IOUtils.close(external, internal);
            // the locked down engine should still point to the previous commit
            assertThat(readOnlyEngine.getLocalCheckpoint(), equalTo(lastSeqNoStats.getLocalCheckpoint()));

@@ -88,7 +100,6 @@ public class ReadOnlyEngineTests extends EngineTestCase {
            try (Engine.GetResult getResult = readOnlyEngine.get(get, readOnlyEngine::acquireSearcher)) {
                assertTrue(getResult.exists());
            }
-
        }
        // Close and reopen the main engine
        try (InternalEngine recoveringEngine = new InternalEngine(config)) {