Logging: Make node name consistent in logger (#31588)
First, some background: we have 15 different methods to get a logger in Elasticsearch, but they can be broken down into three broad categories based on what information is provided when building the logger.

Just a class, like:
```
private static final Logger logger = ESLoggerFactory.getLogger(ActionModule.class);
```
or:
```
protected final Logger logger = Loggers.getLogger(getClass());
```

The class and settings:
```
this.logger = Loggers.getLogger(getClass(), settings);
```

Or more information, like:
```
Loggers.getLogger("index.store.deletes", settings, shardId)
```

The goal of the "class and settings" variant is to attach the node name to the logger. Because we don't always have the settings available, we often use the "just a class" variant and get loggers without node names attached. There isn't any real consistency here: some loggers get the node name because it is convenient and some do not.

This change makes the node name available to all loggers all the time. Almost: there are some caveats around testing that I'll get to. But in *production* code the node name is now available to all loggers. This means we can stop using the "class and settings" variants to fetch loggers, which was the real goal here, but a pleasant side effect is that the node name is now consistent on every log line and optional by editing the logging pattern. This is all powered by setting the node name statically on a logging formatter very early in initialization.

Now to tests: tests can't set the node name statically because subclasses of `ESIntegTestCase` run many nodes in the same JVM, even in the same class loader. Also, lots of tests don't run with a real node, so they don't *have* a node name at all. To support multiple nodes in the same JVM, tests suss out the node name from the thread name, which works surprisingly well and is easy to test in a nice way. For threads that are not part of an `ESIntegTestCase` node we stick whatever useful information we can get from the thread name in the place of the node name. This allows us to keep the logger format consistent.
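To make the test-side fallback concrete, here is a minimal sketch (not the actual converter the test framework uses) of pulling a node name out of a thread name. It assumes integration-test threads are named along the lines of `elasticsearch[node_s0][search][T#1]` and falls back to the raw thread name when no node name is recognizable.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class ThreadNameNodeInfo {
    // Assumed naming convention for ESIntegTestCase node threads.
    private static final Pattern NODE_THREAD =
            Pattern.compile("elasticsearch\\[([^\\]]+)\\].*");

    /** Return the node name embedded in the thread name, or the whole thread name. */
    static String nodeInfo(String threadName) {
        Matcher m = NODE_THREAD.matcher(threadName);
        return m.matches() ? m.group(1) : threadName;
    }

    public static void main(String[] args) {
        System.out.println(nodeInfo("elasticsearch[node_s0][search][T#1]")); // node_s0
        System.out.println(nodeInfo("main"));                                // main
    }
}
```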
Parent: 5f302580f9
Commit: 22459576d7
@@ -1,7 +1,7 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %m%n

 # Do not log at all if it is not really critical - we're in a benchmark
 rootLogger.level = error
@@ -1,7 +1,7 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %m%n

 rootLogger.level = info
 rootLogger.appenderRef.console.ref = console
@@ -7,13 +7,13 @@ logger.action.level = debug
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

 appender.rolling.type = RollingFile
 appender.rolling.name = rolling
 appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
 appender.rolling.layout.type = PatternLayout
-appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n
+appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n
 appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz
 appender.rolling.policies.type = Policies
 appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
@@ -38,7 +38,7 @@ appender.deprecation_rolling.type = RollingFile
 appender.deprecation_rolling.name = deprecation_rolling
 appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
 appender.deprecation_rolling.layout.type = PatternLayout
-appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n
+appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n
 appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.log.gz
 appender.deprecation_rolling.policies.type = Policies
 appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy
@@ -55,7 +55,7 @@ appender.index_search_slowlog_rolling.type = RollingFile
 appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
 appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog.log
 appender.index_search_slowlog_rolling.layout.type = PatternLayout
-appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n
+appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n
 appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog-%i.log.gz
 appender.index_search_slowlog_rolling.policies.type = Policies
 appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
@@ -72,7 +72,7 @@ appender.index_indexing_slowlog_rolling.type = RollingFile
 appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
 appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog.log
 appender.index_indexing_slowlog_rolling.layout.type = PatternLayout
-appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n
+appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n
 appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog-%i.log.gz
 appender.index_indexing_slowlog_rolling.policies.type = Policies
 appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
@@ -50,7 +50,7 @@ appender.index_search_slowlog_rolling.type = RollingFile
 appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
 appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog.log
 appender.index_search_slowlog_rolling.layout.type = PatternLayout
-appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n
+appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n
 appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_search_slowlog-%i.log.gz
 appender.index_search_slowlog_rolling.policies.type = Policies
 appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
@@ -103,7 +103,7 @@ appender.index_indexing_slowlog_rolling.type = RollingFile
 appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
 appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog.log
 appender.index_indexing_slowlog_rolling.layout.type = PatternLayout
-appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.-10000m%n
+appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %.-10000m%n
 appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_index_indexing_slowlog-%i.log.gz
 appender.index_indexing_slowlog_rolling.policies.type = Policies
 appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
@@ -25,7 +25,7 @@ appender.rolling.type = RollingFile <1>
 appender.rolling.name = rolling
 appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log <2>
 appender.rolling.layout.type = PatternLayout
-appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.-10000m%n
+appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %.-10000m%n
 appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz <3>
 appender.rolling.policies.type = Policies
 appender.rolling.policies.time.type = TimeBasedTriggeringPolicy <4>
@@ -87,7 +87,7 @@ public class EvilBootstrapChecksTests extends ESTestCase {
 final boolean enforceLimits = randomBoolean();
 final IllegalArgumentException e = expectThrows(
 IllegalArgumentException.class,
-() -> BootstrapChecks.check(new BootstrapContext(Settings.EMPTY, null), enforceLimits, emptyList(), "testInvalidValue"));
+() -> BootstrapChecks.check(new BootstrapContext(Settings.EMPTY, null), enforceLimits, emptyList()));
 final Matcher<String> matcher = containsString(
 "[es.enforce.bootstrap.checks] must be [true] but was [" + value + "]");
 assertThat(e, hasToString(matcher));
@@ -56,6 +56,7 @@ import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;

+import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.lessThan;
@@ -319,9 +320,11 @@ public class EvilLoggerTests extends ESTestCase {
 assertThat(events.size(), equalTo(expectedLogLines + stackTraceLength));
 for (int i = 0; i < expectedLogLines; i++) {
 if (prefix == null) {
-assertThat(events.get(i), startsWith("test"));
+assertThat("Contents of [" + path + "] are wrong",
+events.get(i), startsWith("[" + getTestName() + "] test"));
 } else {
-assertThat(events.get(i), startsWith("[" + prefix + "] test"));
+assertThat("Contents of [" + path + "] are wrong",
+events.get(i), startsWith("[" + getTestName() + "][" + prefix + "] test"));
 }
 }
 }
@@ -357,6 +360,24 @@ public class EvilLoggerTests extends ESTestCase {
 }
 }

+public void testNoNodeNameWarning() throws IOException, UserException {
+setupLogging("no_node_name");
+
+final String path =
+System.getProperty("es.logs.base_path") +
+System.getProperty("file.separator") +
+System.getProperty("es.logs.cluster_name") + ".log";
+final List<String> events = Files.readAllLines(PathUtils.get(path));
+assertThat(events.size(), equalTo(2));
+final String location = "org.elasticsearch.common.logging.LogConfigurator";
+// the first message is a warning for unsupported configuration files
+assertLogLine(events.get(0), Level.WARN, location, "\\[null\\] Some logging configurations have %marker but don't "
+    + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users "
+    + "who customize log4j2.properties but will stop this behavior in 7.0. You should manually replace "
+    + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:");
+assertThat(events.get(1), endsWith("no_node_name/log4j2.properties"));
+}
+
 private void setupLogging(final String config) throws IOException, UserException {
 setupLogging(config, Settings.EMPTY);
 }
@@ -1,13 +1,13 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n

 appender.file.type = File
 appender.file.name = file
 appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
 appender.file.layout.type = PatternLayout
-appender.file.layout.pattern = [%p][%l] %marker%m%n
+appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

 rootLogger.level = info
 rootLogger.appenderRef.console.ref = console
@@ -23,10 +23,9 @@ appender.deprecation_file.type = File
 appender.deprecation_file.name = deprecation_file
 appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
 appender.deprecation_file.layout.type = PatternLayout
-appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n
+appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

 logger.deprecation.name = deprecation
 logger.deprecation.level = warn
 logger.deprecation.appenderRef.deprecation_file.ref = deprecation_file
 logger.deprecation.additivity = false
@@ -1,7 +1,7 @@
 appender.console3.type = Console
 appender.console3.name = console3
 appender.console3.layout.type = PatternLayout
-appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n

 logger.third.name = third
 logger.third.level = debug
@@ -1,13 +1,13 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n

 appender.file.type = File
 appender.file.name = file
 appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
 appender.file.layout.type = PatternLayout
-appender.file.layout.pattern = [%p][%l] %marker%m%n
+appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

 rootLogger.level = info
 rootLogger.appenderRef.console.ref = console
@@ -17,7 +17,7 @@ appender.deprecation_file.type = File
 appender.deprecation_file.name = deprecation_file
 appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
 appender.deprecation_file.layout.type = PatternLayout
-appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n
+appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

 logger.deprecation.name = deprecation
 logger.deprecation.level = warn
@@ -1,7 +1,7 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n

 logger.has_console_appender.name = has_console_appender
 logger.has_console_appender.level = trace
@@ -1,13 +1,13 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n

 appender.file.type = File
 appender.file.name = file
 appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
 appender.file.layout.type = PatternLayout
-appender.file.layout.pattern = [%p][%l] %marker%m%n
+appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

 rootLogger.level = info
 rootLogger.appenderRef.console.ref = console
@@ -0,0 +1,25 @@
+appender.console.type = Console
+appender.console.name = console
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+
+appender.file.type = File
+appender.file.name = file
+appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = [%p][%l] %marker%m%n
+
+rootLogger.level = info
+rootLogger.appenderRef.console.ref = console
+rootLogger.appenderRef.file.ref = file
+
+appender.deprecation_file.type = File
+appender.deprecation_file.name = deprecation_file
+appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
+appender.deprecation_file.layout.type = PatternLayout
+appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n
+
+logger.deprecation.name = deprecation
+logger.deprecation.level = warn
+logger.deprecation.appenderRef.deprecation_file.ref = deprecation_file
+logger.deprecation.additivity = false
@@ -1,13 +1,13 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n

 appender.file.type = File
 appender.file.name = file
 appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
 appender.file.layout.type = PatternLayout
-appender.file.layout.pattern = %marker%m%n
+appender.file.layout.pattern = [%test_thread_info]%marker %m%n

 rootLogger.level = info
 rootLogger.appenderRef.console.ref = console
@@ -1,13 +1,13 @@
 appender.console.type = Console
 appender.console.name = console
 appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%m%n
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] [%test_thread_info]%marker %m%n

 appender.file.type = File
 appender.file.name = file
 appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
 appender.file.layout.type = PatternLayout
-appender.file.layout.pattern = [%p][%l] %marker%m%n
+appender.file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

 rootLogger.level = info
 rootLogger.appenderRef.console.ref = console
@@ -17,7 +17,7 @@ appender.deprecation_file.type = File
 appender.deprecation_file.name = deprecation_file
 appender.deprecation_file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
 appender.deprecation_file.layout.type = PatternLayout
-appender.deprecation_file.layout.pattern = [%p][%l] %marker%m%n
+appender.deprecation_file.layout.pattern = [%p][%l] [%test_thread_info]%marker %m%n

 logger.deprecation.name = org.elasticsearch.deprecation.common.settings
 logger.deprecation.level = warn
@@ -37,6 +37,7 @@ import org.elasticsearch.common.inject.CreationException;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.logging.LogConfigurator;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.NodeNamePatternConverter;
 import org.elasticsearch.common.network.IfConfig;
 import org.elasticsearch.common.settings.KeyStoreWrapper;
 import org.elasticsearch.common.settings.SecureSettings;
@@ -287,6 +288,10 @@ final class Bootstrap {

 final SecureSettings keystore = loadSecureSettings(initialEnv);
 final Environment environment = createEnvironment(pidFile, keystore, initialEnv.settings(), initialEnv.configFile());
+
+String nodeName = Node.NODE_NAME_SETTING.get(environment.settings());
+NodeNamePatternConverter.setNodeName(nodeName);
+
 try {
 LogConfigurator.configure(environment);
 } catch (IOException e) {
@@ -317,8 +322,7 @@
 // install the default uncaught exception handler; must be done before security is
 // initialized as we do not want to grant the runtime permission
 // setDefaultUncaughtExceptionHandler
-Thread.setDefaultUncaughtExceptionHandler(
-new ElasticsearchUncaughtExceptionHandler(() -> Node.NODE_NAME_SETTING.get(environment.settings())));
+Thread.setDefaultUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler());

 INSTANCE.setup(true, environment);
@@ -30,7 +30,6 @@ import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.discovery.DiscoveryModule;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 import org.elasticsearch.monitor.process.ProcessProbe;
-import org.elasticsearch.node.Node;
 import org.elasticsearch.node.NodeValidationException;

 import java.io.BufferedReader;
@@ -74,8 +73,7 @@ final class BootstrapChecks {
 combinedChecks.addAll(additionalChecks);
 check( context,
 enforceLimits(boundTransportAddress, DiscoveryModule.DISCOVERY_TYPE_SETTING.get(context.settings)),
-Collections.unmodifiableList(combinedChecks),
-Node.NODE_NAME_SETTING.get(context.settings));
+Collections.unmodifiableList(combinedChecks));
 }

 /**
@@ -86,14 +84,12 @@ final class BootstrapChecks {
 * @param context the current node boostrap context
 * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned
 * @param checks the checks to execute
-* @param nodeName the node name to be used as a logging prefix
 */
 static void check(
 final BootstrapContext context,
 final boolean enforceLimits,
-final List<BootstrapCheck> checks,
-final String nodeName) throws NodeValidationException {
-check(context, enforceLimits, checks, Loggers.getLogger(BootstrapChecks.class, nodeName));
+final List<BootstrapCheck> checks) throws NodeValidationException {
+check(context, enforceLimits, checks, Loggers.getLogger(BootstrapChecks.class));
 }

 /**
@@ -27,16 +27,9 @@ import org.elasticsearch.common.logging.Loggers;
 import java.io.IOError;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
-import java.util.Objects;
-import java.util.function.Supplier;

 class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {

-private final Supplier<String> loggingPrefixSupplier;
-
-ElasticsearchUncaughtExceptionHandler(final Supplier<String> loggingPrefixSupplier) {
-this.loggingPrefixSupplier = Objects.requireNonNull(loggingPrefixSupplier);
-}
+private static final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class);

 @Override
 public void uncaughtException(Thread t, Throwable e) {
@@ -70,12 +63,10 @@ class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionH
 }

 void onFatalUncaught(final String threadName, final Throwable t) {
-final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get());
 logger.error(() -> new ParameterizedMessage("fatal error in thread [{}], exiting", threadName), t);
 }

 void onNonFatalUncaught(final String threadName, final Throwable t) {
-final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get());
 logger.warn(() -> new ParameterizedMessage("uncaught exception in thread [{}]", threadName), t);
 }
@@ -46,6 +46,15 @@ public final class ESLoggerFactory {
 }

 public static Logger getLogger(String prefix, Logger logger) {
+/*
+ * In a followup we'll throw an exception if prefix is null or empty
+ * redirecting folks to LogManager.getLogger.
+ *
+ * This and more is tracked in https://github.com/elastic/elasticsearch/issues/32174
+ */
+if (prefix == null || prefix.length() == 0) {
+return logger;
+}
 return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix);
 }
@@ -23,12 +23,16 @@ import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.core.LoggerContext;
 import org.apache.logging.log4j.core.config.AbstractConfiguration;
+import org.apache.logging.log4j.core.config.ConfigurationException;
 import org.apache.logging.log4j.core.config.ConfigurationSource;
 import org.apache.logging.log4j.core.config.Configurator;
 import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilder;
 import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory;
 import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration;
 import org.apache.logging.log4j.core.config.composite.CompositeConfiguration;
+import org.apache.logging.log4j.core.config.plugins.util.PluginManager;
+import org.apache.logging.log4j.core.config.properties.PropertiesConfiguration;
+import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationBuilder;
 import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationFactory;
 import org.apache.logging.log4j.status.StatusConsoleListener;
 import org.apache.logging.log4j.status.StatusData;
@@ -43,6 +47,7 @@ import org.elasticsearch.env.Environment;
 import org.elasticsearch.node.Node;

 import java.io.IOException;
+import java.io.InputStream;
 import java.nio.file.FileVisitOption;
 import java.nio.file.FileVisitResult;
 import java.nio.file.Files;
@@ -50,9 +55,12 @@
 import java.nio.file.SimpleFileVisitor;
 import java.nio.file.attribute.BasicFileAttributes;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.EnumSet;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
+import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.stream.StreamSupport;
@@ -119,6 +127,13 @@ public class LogConfigurator {
 configure(environment.settings(), environment.configFile(), environment.logsFile());
 }

+/**
+ * Load logging plugins so we can have {@code node_name} in the pattern.
+ */
+public static void loadLog4jPlugins() {
+PluginManager.addPackage(LogConfigurator.class.getPackage().getName());
+}
+
 private static void checkErrorListener() {
 assert errorListenerIsRegistered() : "expected error listener to be registered";
 if (error.get()) {
@@ -135,6 +150,8 @@ public class LogConfigurator {
 Objects.requireNonNull(configsPath);
 Objects.requireNonNull(logsPath);

+loadLog4jPlugins();
+
 setLogConfigurationSystemProperty(logsPath, settings);
 // we initialize the status logger immediately otherwise Log4j will complain when we try to get the context
 configureStatusLogger();
@@ -142,7 +159,53 @@
 final LoggerContext context = (LoggerContext) LogManager.getContext(false);

 final List<AbstractConfiguration> configurations = new ArrayList<>();
-final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory();
+
+/*
+ * Subclass the properties configurator to hack the new pattern in
+ * place so users don't have to change log4j2.properties in
+ * a minor release. In 7.0 we'll remove this and force users to
+ * change log4j2.properties. If they don't customize log4j2.properties
+ * then they won't have to do anything anyway.
+ *
+ * Everything in this subclass that isn't marked as a hack is copied
+ * from log4j2's source.
+ */
+Set<String> locationsWithDeprecatedPatterns = Collections.synchronizedSet(new HashSet<>());
+final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory() {
+@Override
+public PropertiesConfiguration getConfiguration(final LoggerContext loggerContext, final ConfigurationSource source) {
+final Properties properties = new Properties();
+try (InputStream configStream = source.getInputStream()) {
+properties.load(configStream);
+} catch (final IOException ioe) {
+throw new ConfigurationException("Unable to load " + source.toString(), ioe);
+}
+// Hack the new pattern into place
+for (String name : properties.stringPropertyNames()) {
+if (false == name.endsWith(".pattern")) continue;
+// Null is weird here but we can't do anything with it so ignore it
+String value = properties.getProperty(name);
+if (value == null) continue;
+// Tests don't need to be changed
+if (value.contains("%test_thread_info")) continue;
+/*
+ * Patterns without a marker are sufficiently customized
+ * that we don't have an opinion about them.
+ */
+if (false == value.contains("%marker")) continue;
+if (false == value.contains("%node_name")) {
+locationsWithDeprecatedPatterns.add(source.getLocation());
+properties.setProperty(name, value.replace("%marker", "[%node_name]%marker "));
+}
+}
+// end hack
+return new PropertiesConfigurationBuilder()
+.setConfigurationSource(source)
+.setRootProperties(properties)
+.setLoggerContext(loggerContext)
+.build();
+}
+};
 final Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
 Files.walkFileTree(configsPath, options, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
 @Override
@@ -163,6 +226,14 @@ public class LogConfigurator {
 context.start(new CompositeConfiguration(configurations));

 configureLoggerLevels(settings);
+
+final String deprecatedLocationsString = String.join("\n ", locationsWithDeprecatedPatterns);
+if (deprecatedLocationsString.length() > 0) {
+LogManager.getLogger(LogConfigurator.class).warn("Some logging configurations have %marker but don't have %node_name. "
+    + "We will automatically add %node_name to the pattern to ease the migration for users who customize "
+    + "log4j2.properties but will stop this behavior in 7.0. You should manually replace `%node_name` with "
+    + "`[%node_name]%marker ` in these locations:\n {}", deprecatedLocationsString);
+}
 }

 private static void configureStatusLogger() {
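As an aside, the effect of the compatibility hack above on a user-customized pattern is just a string replacement. This standalone sketch (the property name and value are only examples) shows the rewrite the subclassed factory performs:

```java
import java.util.Properties;

public class PatternMigrationSketch {
    public static void main(String[] args) {
        Properties properties = new Properties();
        // Hypothetical user-customized pattern that predates %node_name.
        properties.setProperty("appender.console.layout.pattern",
                "[%d{ISO8601}][%-5p][%-25c] %marker%m%n");

        for (String name : properties.stringPropertyNames()) {
            if (false == name.endsWith(".pattern")) continue;
            String value = properties.getProperty(name);
            if (value == null || value.contains("%node_name")) continue;
            if (false == value.contains("%marker")) continue;
            // Same replacement as the hack above.
            properties.setProperty(name, value.replace("%marker", "[%node_name]%marker "));
        }

        // Prints: [%d{ISO8601}][%-5p][%-25c] [%node_name]%marker %m%n
        System.out.println(properties.getProperty("appender.console.layout.pattern"));
    }
}
```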
@@ -31,13 +31,9 @@ import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.node.Node;

-import java.util.ArrayList;
-import java.util.List;
 import java.util.Map;

-import static java.util.Arrays.asList;
 import static org.elasticsearch.common.util.CollectionUtils.asArrayList;

 /**
@@ -71,29 +67,19 @@ public class Loggers {
 }

 public static Logger getLogger(Class<?> clazz, Settings settings, String... prefixes) {
-final List<String> prefixesList = prefixesList(settings, prefixes);
-return Loggers.getLogger(clazz, prefixesList.toArray(new String[prefixesList.size()]));
+return Loggers.getLogger(clazz, prefixes);
 }

 public static Logger getLogger(String loggerName, Settings settings, String... prefixes) {
-final List<String> prefixesList = prefixesList(settings, prefixes);
-return Loggers.getLogger(loggerName, prefixesList.toArray(new String[prefixesList.size()]));
-}
-
-private static List<String> prefixesList(Settings settings, String... prefixes) {
-List<String> prefixesList = new ArrayList<>();
-if (Node.NODE_NAME_SETTING.exists(settings)) {
-prefixesList.add(Node.NODE_NAME_SETTING.get(settings));
-}
-if (prefixes != null && prefixes.length > 0) {
-prefixesList.addAll(asList(prefixes));
-}
-return prefixesList;
+return Loggers.getLogger(loggerName, prefixes);
 }

 public static Logger getLogger(Logger parentLogger, String s) {
-assert parentLogger instanceof PrefixLogger;
-return ESLoggerFactory.getLogger(((PrefixLogger)parentLogger).prefix(), parentLogger.getName() + s);
+String prefix = null;
+if (parentLogger instanceof PrefixLogger) {
+prefix = ((PrefixLogger)parentLogger).prefix();
+}
+return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s);
 }

 public static Logger getLogger(String s) {
@@ -126,7 +112,6 @@ public class Loggers {
 }
 }
 if (sb.length() > 0) {
-sb.append(" ");
 prefix = sb.toString();
 }
 }
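With the node name injected by the log4j2 pattern instead of a logger prefix, call sites no longer need the settings-based overloads. A small before/after sketch (the surrounding class is hypothetical):

```java
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

class ExampleComponent {
    private final Settings settings = Settings.EMPTY;

    // Before: settings were passed mostly so the node name could be used as a prefix.
    private final Logger withSettings = Loggers.getLogger(getClass(), settings);

    // After: a plain logger is enough; the node name comes from the logging pattern.
    private final Logger plain = Loggers.getLogger(getClass());
}
```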
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.logging;
+
+import java.util.Arrays;
+
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.pattern.ConverterKeys;
+import org.apache.logging.log4j.core.pattern.LogEventPatternConverter;
+import org.apache.logging.log4j.core.pattern.PatternConverter;
+import org.apache.lucene.util.SetOnce;
+
+/**
+ * Converts {@code %node_name} in log4j patterns into the current node name.
+ * We *could* use a system property lookup instead but this is very explicit
+ * and fails fast if we try to use the logger without initializing the node
+ * name. As a bonus it ought to be ever so slightly faster because it doesn't
+ * have to look up the system property every time.
+ */
+@Plugin(category = PatternConverter.CATEGORY, name = "NodeNamePatternConverter")
+@ConverterKeys({"node_name"})
+public class NodeNamePatternConverter extends LogEventPatternConverter {
+private static final SetOnce<String> NODE_NAME = new SetOnce<>();
+
+/**
+ * Set the name of this node.
+ */
+public static void setNodeName(String nodeName) {
+NODE_NAME.set(nodeName);
+}
+
+/**
+ * Called by log4j2 to initialize this converter.
+ */
+public static NodeNamePatternConverter newInstance(final String[] options) {
+if (options.length > 0) {
+throw new IllegalArgumentException("no options supported but options provided: "
+    + Arrays.toString(options));
+}
+return new NodeNamePatternConverter(NODE_NAME.get());
+}
+
+private final String nodeName;
+
+private NodeNamePatternConverter(String nodeName) {
+super("NodeName", "node_name");
+this.nodeName = nodeName;
+}
+
+@Override
+public void format(LogEvent event, StringBuilder toAppendTo) {
+toAppendTo.append(nodeName);
+}
+}
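A rough usage sketch of the new converter outside of Elasticsearch's own bootstrap (so treat the setup details as assumptions): the plugin package has to be registered and the node name set before any pattern containing `%node_name` is parsed, which is exactly what `LogConfigurator.loadLog4jPlugins()` and `Bootstrap` do above.

```java
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.plugins.util.PluginManager;
import org.apache.logging.log4j.core.impl.Log4jLogEvent;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.message.SimpleMessage;
import org.elasticsearch.common.logging.NodeNamePatternConverter;

public class NodeNamePatternSketch {
    public static void main(String[] args) {
        // Let log4j2 discover NodeNamePatternConverter's @Plugin annotation.
        PluginManager.addPackage("org.elasticsearch.common.logging");
        // Must happen before the first %node_name pattern is built.
        NodeNamePatternConverter.setNodeName("node-1");

        PatternLayout layout = PatternLayout.newBuilder()
                .withPattern("[%node_name] %m%n")
                .build();
        LogEvent event = Log4jLogEvent.newBuilder()
                .setMessage(new SimpleMessage("starting ..."))
                .build();
        System.out.print(layout.toSerializable(event)); // [node-1] starting ...
    }
}
```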
@@ -231,7 +231,14 @@ public class Node implements Closeable {
 }

 private static final String CLIENT_TYPE = "node";

 private final Lifecycle lifecycle = new Lifecycle();

+/**
+ * Logger initialized in the ctor because if it were initialized statically
+ * then it wouldn't get the node name.
+ */
+private final Logger logger;
 private final Injector injector;
 private final Settings settings;
 private final Settings originalSettings;
@@ -257,13 +264,9 @@
 }

 protected Node(final Environment environment, Collection<Class<? extends Plugin>> classpathPlugins) {
+logger = Loggers.getLogger(Node.class);
 final List<Closeable> resourcesToClose = new ArrayList<>(); // register everything we need to release in the case of an error
 boolean success = false;
-{
-// use temp logger just to say we are starting. we can't use it later on because the node name might not be set
-Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(environment.settings()));
-logger.info("initializing ...");
-}
 try {
 originalSettings = environment.settings();
 Settings tmpSettings = Settings.builder().put(environment.settings())
@@ -279,7 +282,6 @@
 final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings);
 final String nodeId = nodeEnvironment.nodeId();
 tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeId);
-final Logger logger = Loggers.getLogger(Node.class, tmpSettings);
 // this must be captured after the node name is possibly added to the settings
 final String nodeName = NODE_NAME_SETTING.get(tmpSettings);
 if (hadPredefinedNodeName == false) {
@@ -631,7 +633,6 @@
 return this;
 }

-Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
 logger.info("starting ...");
 pluginLifecycleComponents.forEach(LifecycleComponent::start);

@@ -742,7 +743,6 @@
 if (!lifecycle.moveToStopped()) {
 return this;
 }
-Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
 logger.info("stopping ...");

 injector.getInstance(ResourceWatcherService.class).stop();
@@ -785,7 +785,6 @@
 return;
 }

-Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
 logger.info("closing ...");
 List<Closeable> toClose = new ArrayList<>();
 StopWatch stopWatch = new StopWatch("node_close");
@@ -131,7 +131,7 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e =
 expectThrows(NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, checks, "testExceptionAggregation"));
+() -> BootstrapChecks.check(defaultContext, true, checks));
 assertThat(e, hasToString(allOf(containsString("bootstrap checks failed"), containsString("first"), containsString("second"))));
 final Throwable[] suppressed = e.getSuppressed();
 assertThat(suppressed.length, equalTo(2));
@@ -162,7 +162,7 @@ public class BootstrapChecksTests extends ESTestCase {
 final NodeValidationException e =
 expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(
 e.getMessage(),
 containsString("initial heap size [" + initialHeapSize.get() + "] " +
@@ -170,7 +170,7 @@ public class BootstrapChecksTests extends ESTestCase {

 initialHeapSize.set(maxHeapSize.get());

-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // nothing should happen if the initial heap size or the max
 // heap size is not available
@@ -179,7 +179,7 @@ public class BootstrapChecksTests extends ESTestCase {
 } else {
 maxHeapSize.set(0);
 }
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testHeapSizeCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testFileDescriptorLimits() throws NodeValidationException {
@@ -205,17 +205,17 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e =
 expectThrows(NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(e.getMessage(), containsString("max file descriptors"));

 maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE));

-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // nothing should happen if current file descriptor count is
 // not available
 maxFileDescriptorCount.set(-1);
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testFileDescriptorLimits");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testFileDescriptorLimitsThrowsOnInvalidLimit() {
@@ -262,15 +262,13 @@ public class BootstrapChecksTests extends ESTestCase {
 () -> BootstrapChecks.check(
 bootstrapContext,
 true,
-Collections.singletonList(check),
-"testFileDescriptorLimitsThrowsOnInvalidLimit"));
+Collections.singletonList(check)));
 assertThat(
 e.getMessage(),
 containsString("memory locking requested for elasticsearch process but memory is not locked"));
 } else {
 // nothing should happen
-BootstrapChecks.check(bootstrapContext, true, Collections.singletonList(check),
-"testFileDescriptorLimitsThrowsOnInvalidLimit");
+BootstrapChecks.check(bootstrapContext, true, Collections.singletonList(check));
 }
 }
 }
@@ -287,17 +285,17 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(e.getMessage(), containsString("max number of threads"));

 maxNumberOfThreads.set(randomIntBetween(limit + 1, Integer.MAX_VALUE));

-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // nothing should happen if current max number of threads is
 // not available
 maxNumberOfThreads.set(-1);
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testMaxSizeVirtualMemory() throws NodeValidationException {
@@ -317,16 +315,16 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(e.getMessage(), containsString("max size virtual memory"));

 maxSizeVirtualMemory.set(rlimInfinity);

-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // nothing should happen if max size virtual memory is not available
 maxSizeVirtualMemory.set(Long.MIN_VALUE);
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxSizeVirtualMemory");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testMaxFileSizeCheck() throws NodeValidationException {
@@ -346,16 +344,16 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(e.getMessage(), containsString("max file size"));

 maxFileSize.set(rlimInfinity);

-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // nothing should happen if max file size is not available
 maxFileSize.set(Long.MIN_VALUE);
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxFileSize");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testMaxMapCountCheck() throws NodeValidationException {
@@ -370,17 +368,17 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(e.getMessage(), containsString("max virtual memory areas vm.max_map_count"));

 maxMapCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE));

-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // nothing should happen if current vm.max_map_count is not
 // available
 maxMapCount.set(-1);
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testMaxMapCountCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testClientJvmCheck() throws NodeValidationException {
@@ -394,14 +392,14 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(
 e.getMessage(),
 containsString("JVM is using the client VM [Java HotSpot(TM) 32-Bit Client VM] " +
 "but should be using a server VM for the best performance"));

 vmName.set("Java HotSpot(TM) 32-Bit Server VM");
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testClientJvmCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testUseSerialGCCheck() throws NodeValidationException {
@@ -415,14 +413,14 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(check)));
 assertThat(
 e.getMessage(),
 containsString("JVM is using the serial collector but should not be for the best performance; " + "" +
 "either it's the default for the VM [" + JvmInfo.jvmInfo().getVmName() +"] or -XX:+UseSerialGC was explicitly specified"));

 useSerialGC.set("false");
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), "testUseSerialGCCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));
 }

 public void testSystemCallFilterCheck() throws NodeValidationException {
@@ -439,15 +437,14 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck),
-"testSystemCallFilterCheck"));
+() -> BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck)));
 assertThat(
 e.getMessage(),
 containsString("system call filters failed to install; " +
 "check the logs and fix your configuration or disable system call filters at your own risk"));

 isSystemCallFilterInstalled.set(true);
-BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck), "testSystemCallFilterCheck");
+BootstrapChecks.check(context, true, Collections.singletonList(systemCallFilterEnabledCheck));
 BootstrapContext context_1 = new BootstrapContext(Settings.builder().put("bootstrap.system_call_filter", false).build(), null);
 final BootstrapChecks.SystemCallFilterCheck systemCallFilterNotEnabledCheck = new BootstrapChecks.SystemCallFilterCheck() {
 @Override
@@ -456,9 +453,9 @@ public class BootstrapChecksTests extends ESTestCase {
 }
 };
 isSystemCallFilterInstalled.set(false);
-BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck");
+BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck));
 isSystemCallFilterInstalled.set(true);
-BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck), "testSystemCallFilterCheck");
+BootstrapChecks.check(context_1, true, Collections.singletonList(systemCallFilterNotEnabledCheck));
 }

 public void testMightForkCheck() throws NodeValidationException {
@@ -553,8 +550,6 @@ public class BootstrapChecksTests extends ESTestCase {
 final Runnable enableMightFork,
 final Consumer<NodeValidationException> consumer) throws NodeValidationException {

-final String methodName = Thread.currentThread().getStackTrace()[2].getMethodName();
-
 // if system call filter is disabled, nothing should happen
 isSystemCallFilterInstalled.set(false);
 if (randomBoolean()) {
@@ -562,13 +557,13 @@ public class BootstrapChecksTests extends ESTestCase {
 } else {
 enableMightFork.run();
 }
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName);
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // if system call filter is enabled, but we will not fork, nothing should
 // happen
 isSystemCallFilterInstalled.set(true);
 disableMightFork.run();
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(check), methodName);
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(check));

 // if system call filter is enabled, and we might fork, the check should be enforced, regardless of bootstrap checks being enabled
 // or not
@@ -577,7 +572,7 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check), methodName));
+() -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check)));
 consumer.accept(e);
 }

@@ -602,7 +597,7 @@ public class BootstrapChecksTests extends ESTestCase {
 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
 () -> {
-BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck");
+BootstrapChecks.check(defaultContext, true, checks);
 });
 assertThat(
 e.getMessage(),
@@ -613,7 +608,7 @@ public class BootstrapChecksTests extends ESTestCase {

 // if not on an early-access build, nothing should happen
 javaVersion.set(randomFrom("1.8.0_152", "9"));
-BootstrapChecks.check(defaultContext, true, checks, "testEarlyAccessCheck");
+BootstrapChecks.check(defaultContext, true, checks);

 }

@@ -649,7 +644,7 @@ public class BootstrapChecksTests extends ESTestCase {
 final NodeValidationException e =
 expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck"));
+() -> BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck)));
 assertThat(
 e.getMessage(),
 containsString(
@@ -657,12 +652,12 @@ public class BootstrapChecksTests extends ESTestCase {

 // if G1GC is disabled, nothing should happen
 isG1GCEnabled.set(false);
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck));

 // if on or after update 40, nothing should happen independent of whether or not G1GC is enabled
 isG1GCEnabled.set(randomBoolean());
 jvmVersion.set(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(40, 112), randomIntBetween(1, 128)));
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck), "testG1GCCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(g1GCCheck));

 final BootstrapChecks.G1GCCheck nonOracleCheck = new BootstrapChecks.G1GCCheck() {

@@ -674,7 +669,7 @@ public class BootstrapChecksTests extends ESTestCase {
 };

 // if not on an Oracle JVM, nothing should happen
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonOracleCheck), "testG1GCCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonOracleCheck));

 final BootstrapChecks.G1GCCheck nonJava8Check = new BootstrapChecks.G1GCCheck() {

@@ -686,7 +681,7 @@ public class BootstrapChecksTests extends ESTestCase {
 };

 // if not Java 8, nothing should happen
-BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonJava8Check), "testG1GCCheck");
+BootstrapChecks.check(defaultContext, true, Collections.singletonList(nonJava8Check));
 }

 public void testAllPermissionCheck() throws NodeValidationException {
@@ -701,12 +696,12 @@ public class BootstrapChecksTests extends ESTestCase {
 final List<BootstrapCheck> checks = Collections.singletonList(allPermissionCheck);
 final NodeValidationException e = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, true, checks, "testIsAllPermissionCheck"));
+() -> BootstrapChecks.check(defaultContext, true, checks));
 assertThat(e, hasToString(containsString("granting the all permission effectively disables security")));

 // if all permissions are not granted, nothing should happen
 isAllPermissionGranted.set(false);
-BootstrapChecks.check(defaultContext, true, checks, "testIsAllPermissionCheck");
+BootstrapChecks.check(defaultContext, true, checks);
 }

 public void testAlwaysEnforcedChecks() {
@@ -724,7 +719,7 @@ public class BootstrapChecksTests extends ESTestCase {

 final NodeValidationException alwaysEnforced = expectThrows(
 NodeValidationException.class,
-() -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check), "testAlwaysEnforcedChecks"));
+() -> BootstrapChecks.check(defaultContext, randomBoolean(), Collections.singletonList(check)));
 assertThat(alwaysEnforced, hasToString(containsString("error")));
 }
@@ -65,7 +65,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase {
        final AtomicInteger observedStatus = new AtomicInteger();
        final AtomicReference<String> threadNameReference = new AtomicReference<>();
        final AtomicReference<Throwable> throwableReference = new AtomicReference<>();
        thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtError") {
        thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler() {

            @Override
            void halt(int status) {
@@ -106,7 +106,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase {
        thread.setName(name);
        final AtomicReference<String> threadNameReference = new AtomicReference<>();
        final AtomicReference<Throwable> throwableReference = new AtomicReference<>();
        thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtException") {
        thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler() {
            @Override
            void halt(int status) {
                fail();
@@ -1978,7 +1978,7 @@ public class InternalEngineTests extends EngineTestCase {
        @Override
        public void append(LogEvent event) {
            final String formattedMessage = event.getMessage().getFormattedMessage();
            if (event.getLevel() == Level.TRACE && event.getMarker().getName().contains("[index][0] ")) {
            if (event.getLevel() == Level.TRACE && event.getMarker().getName().contains("[index][0]")) {
                if (event.getLoggerName().endsWith(".IW") &&
                    formattedMessage.contains("IW: now apply all deletes")) {
                    sawIndexWriterMessage = true;
@@ -0,0 +1,111 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.logging;

import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.pattern.ConverterKeys;
import org.apache.logging.log4j.core.pattern.LogEventPatternConverter;
import org.apache.logging.log4j.core.pattern.PatternConverter;
import org.elasticsearch.test.ESIntegTestCase;

/**
 * Converts {@code %test_thread_info} in log4j patterns into information
 * based on the logging thread's name. If that thread is part of an
 * {@link ESIntegTestCase} then this information is the node name.
 */
@Plugin(category = PatternConverter.CATEGORY, name = "TestInfoPatternConverter")
@ConverterKeys({"test_thread_info"})
public class TestThreadInfoPatternConverter extends LogEventPatternConverter {
    /**
     * Called by log4j2 to initialize this converter.
     */
    public static TestThreadInfoPatternConverter newInstance(final String[] options) {
        if (options.length > 0) {
            throw new IllegalArgumentException("no options supported but options provided: "
                    + Arrays.toString(options));
        }
        return new TestThreadInfoPatternConverter();
    }

    private TestThreadInfoPatternConverter() {
        super("TestInfo", "test_thread_info");
    }

    @Override
    public void format(LogEvent event, StringBuilder toAppendTo) {
        toAppendTo.append(threadInfo(event.getThreadName()));
    }

    private static final Pattern ELASTICSEARCH_THREAD_NAME_PATTERN =
            Pattern.compile("elasticsearch\\[(.+)\\]\\[.+\\].+");
    private static final Pattern TEST_THREAD_NAME_PATTERN =
            Pattern.compile("TEST-.+\\.(.+)-seed#\\[.+\\]");
    private static final Pattern TEST_SUITE_INIT_THREAD_NAME_PATTERN =
            Pattern.compile("SUITE-.+-worker");
    private static final Pattern NOT_YET_NAMED_NODE_THREAD_NAME_PATTERN =
            Pattern.compile("test_SUITE-CHILD_VM.+cluster\\[T#(.+)\\]");
    static String threadInfo(String threadName) {
        Matcher m = ELASTICSEARCH_THREAD_NAME_PATTERN.matcher(threadName);
        if (m.matches()) {
            // Thread looks like a node thread so use the node name
            return m.group(1);
        }
        m = TEST_THREAD_NAME_PATTERN.matcher(threadName);
        if (m.matches()) {
            /*
             * Thread looks like a test thread so use the test method name.
             * It'd be pretty reasonable not to use a prefix at all here but
             * the logger layout pretty much expects one and the test method
             * can be pretty nice to have around anyway.
             */
            return m.group(1);
        }
        m = TEST_SUITE_INIT_THREAD_NAME_PATTERN.matcher(threadName);
        if (m.matches()) {
            /*
             * Thread looks like test suite initialization or tear down and
             * we don't have any more information to give. Like above, we
             * could spit out nothing here but the logger layout expects
             * something and it *is* nice to know what lines come from test
             * teardown and initialization.
             */
            return "suite";
        }
        m = NOT_YET_NAMED_NODE_THREAD_NAME_PATTERN.matcher(threadName);
        if (m.matches()) {
            /*
             * These are as yet unnamed integ test nodes. I'd prefer to log
             * the node name but I don't have it yet.
             */
            return "integ_" + m.group(1) + "";
        }
        /*
         * These are uncategorized threads. We log the entire thread name in
         * case it is useful. We wrap it in `[]` so you can tell that it is a
         * thread name rather than a node name or something.
         */
        return "[" + threadName + "]";
    }
}
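For a rough feel for how the patterns above classify thread names, here is a minimal standalone sketch. The thread names are made up but follow the shapes the regexes expect (a node thread as named by `EsExecutors` plus a `[T#n]` suffix, and a randomized-runner test thread); this is an illustration, not part of the change.
```
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ThreadInfoDemo {
    // Same regexes as TestThreadInfoPatternConverter above.
    private static final Pattern NODE_THREAD = Pattern.compile("elasticsearch\\[(.+)\\]\\[.+\\].+");
    private static final Pattern TEST_THREAD = Pattern.compile("TEST-.+\\.(.+)-seed#\\[.+\\]");

    public static void main(String[] args) {
        // A node thread: the converter prints the node name.
        Matcher m = NODE_THREAD.matcher("elasticsearch[node_t0][generic][T#1]");
        System.out.println(m.matches() ? m.group(1) : "no match"); // node_t0

        // A test thread: the converter prints the test method name.
        m = TEST_THREAD.matcher("TEST-SomeTests.testFoo-seed#[DEADBEEF]");
        System.out.println(m.matches() ? m.group(1) : "no match"); // testFoo
    }
}
```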
@@ -65,6 +65,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@@ -200,6 +201,8 @@ public abstract class ESTestCase extends LuceneTestCase {
        System.setProperty("log4j.shutdownHookEnabled", "false");
        System.setProperty("log4j2.disable.jmx", "true");

        LogConfigurator.loadLog4jPlugins();

        // Enable Netty leak detection and monitor logger for logged leak errors
        System.setProperty("io.netty.leakDetection.level", "paranoid");
        String leakLoggerName = "io.netty.util.ResourceLeakDetector";
@@ -321,7 +324,7 @@ public abstract class ESTestCase extends LuceneTestCase {

    @Before
    public final void before() {
        logger.info("[{}]: before test", getTestName());
        logger.info("{}before test", getTestParamsForLogging());
        assertNull("Thread context initialized twice", threadContext);
        if (enableWarningsCheck()) {
            this.threadContext = new ThreadContext(Settings.EMPTY);
@@ -350,7 +353,16 @@ public abstract class ESTestCase extends LuceneTestCase {
        }
        ensureAllSearchContextsReleased();
        ensureCheckIndexPassed();
        logger.info("[{}]: after test", getTestName());
        logger.info("{}after test", getTestParamsForLogging());
    }

    private String getTestParamsForLogging() {
        String name = getTestName();
        int start = name.indexOf('{');
        if (start < 0) return "";
        int end = name.lastIndexOf('}');
        if (end < 0) return "";
        return "[" + name.substring(start + 1, end) + "] ";
    }

    private void ensureNoWarnings() throws IOException {
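The new `getTestParamsForLogging` helper only kicks in for parameterized tests, whose names look like `method {params}`. A small sketch of the same substring logic on a made-up test name, just to show the output shape:
```
public class TestParamsDemo {
    // Same extraction as the getTestParamsForLogging helper above.
    static String paramsForLogging(String name) {
        int start = name.indexOf('{');
        if (start < 0) return "";
        int end = name.lastIndexOf('}');
        if (end < 0) return "";
        return "[" + name.substring(start + 1, end) + "] ";
    }

    public static void main(String[] args) {
        System.out.println(paramsForLogging("testFoo"));                     // "" so the line is just "before test"
        System.out.println(paramsForLogging("testFoo {p0=UTF-8, p1=true}")); // "[p0=UTF-8, p1=true] "
    }
}
```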
@@ -1,7 +1,7 @@
appender.console.type = Console
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%test_thread_info]%marker %m%n

rootLogger.level = ${sys:tests.es.logger.level:-info}
rootLogger.appenderRef.console.ref = console
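With this layout a line logged from a test thread comes out looking roughly like the following; the timestamp, logger, and message are invented and only the shape matters:
```
[2018-06-26T10:14:29,852][INFO ][o.e.c.l.SomeTests        ] [testFoo] some message
```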
@@ -0,0 +1,9 @@
# RandomizedRunner renames threads and we'd like to look at accurate thread
# names. Also, this is the default in log4j2's master branch for versions of
# Java after 1.8.0_102, which covers most of the versions we use anyway. After
# that version of Java, calling `Thread.getName()` doesn't allocate, so UNCACHED
# ought to be faster than CACHED because it doesn't have to deal with
# ThreadLocals.
# While we don't use AsyncLogger, we do end up with mutable log events and
# those use this key for configuration.
AsyncLogger.ThreadNameStrategy = UNCACHED
@@ -0,0 +1,53 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.logging;

import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.test.ESTestCase;
import org.junit.BeforeClass;

import static org.elasticsearch.common.logging.TestThreadInfoPatternConverter.threadInfo;

public class TestThreadInfoPatternConverterTests extends ESTestCase {
    private static String suiteInfo;

    @BeforeClass
    public static void captureSuiteInfo() {
        suiteInfo = threadInfo(Thread.currentThread().getName());
    }

    public void testThreadInfo() {
        // Threads that are part of a node get the node name
        String nodeName = randomAlphaOfLength(5);
        String threadName = EsExecutors.threadName(nodeName, randomAlphaOfLength(20))
                + "[T#" + between(0, 1000) + "]";
        assertEquals(nodeName, threadInfo(threadName));

        // Test threads get the test name
        assertEquals(getTestName(), threadInfo(Thread.currentThread().getName()));

        // Suite initialization gets "suite"
        assertEquals("suite", suiteInfo);

        // And stuff that doesn't match anything gets wrapped in [] so we can see it
        String unmatched = randomAlphaOfLength(5);
        assertEquals("[" + unmatched + "]", threadInfo(unmatched));
    }
}
@@ -363,9 +363,16 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand {
        final Logger logger = ESLoggerFactory.getLogger(ESNativeRealmMigrateTool.class);
        Loggers.setLevel(logger, Level.ALL);

        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();

        // create appender
        final Appender appender = new AbstractAppender(ESNativeRealmMigrateTool.class.getName(), null,
                PatternLayout.newBuilder().withPattern("%m").build()) {
                PatternLayout.newBuilder()
                        // Specify the configuration so log4j doesn't re-initialize
                        .withConfiguration(config)
                        .withPattern("%m")
                        .build()) {
            @Override
            public void append(LogEvent event) {
                switch (event.getLevel().getStandardLevel()) {
@@ -384,8 +391,6 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand {
        appender.start();

        // get the config, detach from parent, remove appenders, add custom appender
        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();
        final LoggerConfig loggerConfig = config.getLoggerConfig(ESNativeRealmMigrateTool.class.getName());
        loggerConfig.setParent(null);
        loggerConfig.getAppenders().forEach((s, a) -> Loggers.removeAppender(logger, a));