JSON logging refactoring and X-Opaque-ID support backport(#41354) (#44178)

This is a refactor to current JSON logging to make it more open for extensions
and support for custom ES log messages used inDeprecationLogger IndexingSlowLog , SearchSLowLog
We want to include x-opaque-id in deprecation logs. The easiest way to have this as an additional JSON field instead of part of the message is to create a custom DeprecatedMessage (extends ESLogMessage)

These messages are regular log4j messages with a text, but also carry a map of fields which can then populate the log pattern. The logic for this lives in ESJsonLayout and ESMessageFieldConverter.

Similar approach can be used to refactor IndexingSlowLog and SearchSlowLog JSON logs to contain fields previously only present as escaped JSON string in a message field.

closes #41350
 backport #41354
This commit is contained in:
Przemyslaw Gomulka 2019-07-12 16:53:27 +02:00 committed by GitHub
parent dd5f4ae00e
commit e23ecc5838
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 900 additions and 140 deletions

View File

@ -16,6 +16,7 @@ appender.deprecation_rolling.type = Console
appender.deprecation_rolling.name = deprecation_rolling appender.deprecation_rolling.name = deprecation_rolling
appender.deprecation_rolling.layout.type = ESJsonLayout appender.deprecation_rolling.layout.type = ESJsonLayout
appender.deprecation_rolling.layout.type_name = deprecation appender.deprecation_rolling.layout.type_name = deprecation
appender.deprecation_rolling.layout.esmessagefields=x-opaque-id
logger.deprecation.name = org.elasticsearch.deprecation logger.deprecation.name = org.elasticsearch.deprecation
logger.deprecation.level = warn logger.deprecation.level = warn
@ -26,6 +27,7 @@ appender.index_search_slowlog_rolling.type = Console
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
appender.index_search_slowlog_rolling.layout.esmessagefields=message,took,took_millis,total_hits,stats,search_type,total_shards,source,id
logger.index_search_slowlog_rolling.name = index.search.slowlog logger.index_search_slowlog_rolling.name = index.search.slowlog
logger.index_search_slowlog_rolling.level = trace logger.index_search_slowlog_rolling.level = trace
@ -36,6 +38,7 @@ appender.index_indexing_slowlog_rolling.type = Console
appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout
appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog
appender.index_indexing_slowlog_rolling.layout.esmessagefields=message,took,took_millis,doc_type,id,routing,source
logger.index_indexing_slowlog.name = index.indexing.slowlog.index logger.index_indexing_slowlog.name = index.indexing.slowlog.index
logger.index_indexing_slowlog.level = trace logger.index_indexing_slowlog.level = trace

View File

@ -67,6 +67,7 @@ appender.deprecation_rolling.name = deprecation_rolling
appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.json appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.json
appender.deprecation_rolling.layout.type = ESJsonLayout appender.deprecation_rolling.layout.type = ESJsonLayout
appender.deprecation_rolling.layout.type_name = deprecation appender.deprecation_rolling.layout.type_name = deprecation
appender.deprecation_rolling.layout.esmessagefields=x-opaque-id
appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.json.gz appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.json.gz
appender.deprecation_rolling.policies.type = Policies appender.deprecation_rolling.policies.type = Policies
@ -103,6 +104,7 @@ appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:f
.cluster_name}_index_search_slowlog.json .cluster_name}_index_search_slowlog.json
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
appender.index_search_slowlog_rolling.layout.esmessagefields=message,took,took_millis,total_hits,stats,search_type,total_shards,source,id
appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\ appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
.cluster_name}_index_search_slowlog-%i.json.gz .cluster_name}_index_search_slowlog-%i.json.gz
@ -141,6 +143,7 @@ appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys
_index_indexing_slowlog.json _index_indexing_slowlog.json
appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout
appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog
appender.index_indexing_slowlog_rolling.layout.esmessagefields=message,took,took_millis,doc_type,id,routing,source
appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\ appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_index_indexing_slowlog-%i.json.gz _index_indexing_slowlog-%i.json.gz

View File

@ -0,0 +1,97 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.logging;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import org.junit.BeforeClass;
/**
 * Verifies the log4j conversion pattern that {@link ESJsonLayout} builds:
 * the fixed JSON fields, the optional fields declared via
 * {@code esmessagefields}, and the rejection of an empty {@code type_name}.
 */
public class ESJsonLayoutTests extends ESTestCase {
    @BeforeClass
    public static void initNodeName() {
        // the %node_name converter requires a node name to be configured once
        JsonLogsTestSetup.init();
    }

    public void testEmptyType() {
        // a layout without a type_name is invalid and must fail fast
        expectThrows(IllegalArgumentException.class, () -> ESJsonLayout.newBuilder().build());
    }

    public void testLayout() {
        ESJsonLayout layout = ESJsonLayout.newBuilder()
                                          .setType("server")
                                          .build();
        String actualPattern = layout.getPatternLayout().getConversionPattern();

        // default layout: fixed fields only, message rendered from the raw log event
        String expectedPattern = "{" +
            "\"type\": \"server\", " +
            "\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " +
            "\"level\": \"%p\", " +
            "\"component\": \"%c{1.}\", " +
            "\"cluster.name\": \"${sys:es.logs.cluster_name}\", " +
            "\"node.name\": \"%node_name\", " +
            "\"message\": \"%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}\"" +
            "%notEmpty{, %node_and_cluster_id }" +
            "%exceptionAsJson }\n";
        assertThat(actualPattern, Matchers.equalTo(expectedPattern));
    }

    public void testLayoutWithAdditionalFields() {
        ESJsonLayout layout = ESJsonLayout.newBuilder()
                                          .setType("server")
                                          .setESMessageFields("x-opaque-id,someOtherField")
                                          .build();
        String actualPattern = layout.getPatternLayout().getConversionPattern();

        // extra fields are appended after the default message field, each wrapped
        // in %notEmpty so they disappear when the message does not carry them
        String expectedPattern = "{" +
            "\"type\": \"server\", " +
            "\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " +
            "\"level\": \"%p\", " +
            "\"component\": \"%c{1.}\", " +
            "\"cluster.name\": \"${sys:es.logs.cluster_name}\", " +
            "\"node.name\": \"%node_name\", " +
            "\"message\": \"%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}\"" +
            "%notEmpty{, \"x-opaque-id\": \"%ESMessageField{x-opaque-id}\"}" +
            "%notEmpty{, \"someOtherField\": \"%ESMessageField{someOtherField}\"}" +
            "%notEmpty{, %node_and_cluster_id }" +
            "%exceptionAsJson }\n";
        assertThat(actualPattern, Matchers.equalTo(expectedPattern));
    }

    public void testLayoutWithAdditionalFieldOverride() {
        ESJsonLayout layout = ESJsonLayout.newBuilder()
                                          .setType("server")
                                          .setESMessageFields("message")
                                          .build();
        String actualPattern = layout.getPatternLayout().getConversionPattern();

        // declaring "message" as an ES message field replaces the default
        // message converter with %ESMessageField{message}
        String expectedPattern = "{" +
            "\"type\": \"server\", " +
            "\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}\", " +
            "\"level\": \"%p\", " +
            "\"component\": \"%c{1.}\", " +
            "\"cluster.name\": \"${sys:es.logs.cluster_name}\", " +
            "\"node.name\": \"%node_name\"" +
            "%notEmpty{, \"message\": \"%ESMessageField{message}\"}" +
            "%notEmpty{, %node_and_cluster_id }" +
            "%exceptionAsJson }\n";
        assertThat(actualPattern, Matchers.equalTo(expectedPattern));
    }
}

View File

@ -38,19 +38,28 @@ import org.junit.BeforeClass;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
/** /**
* This test confirms JSON log structure is properly formatted and can be parsed. * This test confirms JSON log structure is properly formatted and can be parsed.
* It has to be in a <code>org.elasticsearch.common.logging</code> package to use <code>PrefixLogger</code> * It has to be in a <code>org.elasticsearch.common.logging</code> package to use <code>PrefixLogger</code>
*/ */
public class JsonLoggerTests extends ESTestCase { public class JsonLoggerTests extends ESTestCase {
private static final String LINE_SEPARATOR = System.lineSeparator(); private static final String LINE_SEPARATOR = System.lineSeparator();
@BeforeClass @BeforeClass
public static void initNodeName() { public static void initNodeName() {
LogConfigurator.setNodeName("sample-name"); JsonLogsTestSetup.init();
} }
@Override @Override
@ -66,6 +75,83 @@ public class JsonLoggerTests extends ESTestCase {
Configurator.shutdown(context); Configurator.shutdown(context);
super.tearDown(); super.tearDown();
} }
public void testDeprecatedMessage() throws IOException {
final Logger testLogger = LogManager.getLogger("test");
testLogger.info(new DeprecatedMessage("deprecated message1", "someId"));
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"),
System.getProperty("es.logs.cluster_name") + "_deprecated.json");
try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) {
List<Map<String, String>> jsonLogs = stream
.collect(Collectors.toList());
assertThat(jsonLogs, contains(
allOf(
hasEntry("type", "deprecated"),
hasEntry("level", "INFO"),
hasEntry("component", "test"),
hasEntry("cluster.name", "elasticsearch"),
hasEntry("node.name", "sample-name"),
hasEntry("message", "deprecated message1"),
hasEntry("x-opaque-id", "someId"))
)
);
}
}
public void testDeprecatedMessageWithoutXOpaqueId() throws IOException {
final Logger testLogger = LogManager.getLogger("test");
testLogger.info(new DeprecatedMessage("deprecated message1", "someId"));
testLogger.info(new DeprecatedMessage("deprecated message2", ""));
testLogger.info(new DeprecatedMessage("deprecated message3", null));
testLogger.info("deprecated message4");
final Path path = PathUtils.get(System.getProperty("es.logs.base_path"),
System.getProperty("es.logs.cluster_name") + "_deprecated.json");
try (Stream<Map<String, String>> stream = JsonLogsStream.mapStreamFrom(path)) {
List<Map<String, String>> jsonLogs = stream
.collect(Collectors.toList());
assertThat(jsonLogs, contains(
allOf(
hasEntry("type", "deprecated"),
hasEntry("level", "INFO"),
hasEntry("component", "test"),
hasEntry("cluster.name", "elasticsearch"),
hasEntry("node.name", "sample-name"),
hasEntry("message", "deprecated message1"),
hasEntry("x-opaque-id", "someId")),
allOf(
hasEntry("type", "deprecated"),
hasEntry("level", "INFO"),
hasEntry("component", "test"),
hasEntry("cluster.name", "elasticsearch"),
hasEntry("node.name", "sample-name"),
hasEntry("message", "deprecated message2"),
not(hasKey("x-opaque-id"))
),
allOf(
hasEntry("type", "deprecated"),
hasEntry("level", "INFO"),
hasEntry("component", "test"),
hasEntry("cluster.name", "elasticsearch"),
hasEntry("node.name", "sample-name"),
hasEntry("message", "deprecated message3"),
not(hasKey("x-opaque-id"))
),
allOf(
hasEntry("type", "deprecated"),
hasEntry("level", "INFO"),
hasEntry("component", "test"),
hasEntry("cluster.name", "elasticsearch"),
hasEntry("node.name", "sample-name"),
hasEntry("message", "deprecated message4"),
not(hasKey("x-opaque-id"))
)
)
);
}
}
public void testJsonLayout() throws IOException { public void testJsonLayout() throws IOException {
final Logger testLogger = LogManager.getLogger("test"); final Logger testLogger = LogManager.getLogger("test");
@ -79,7 +165,7 @@ public class JsonLoggerTests extends ESTestCase {
try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) { try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) {
List<JsonLogLine> jsonLogs = collectLines(stream); List<JsonLogLine> jsonLogs = collectLines(stream);
assertThat(jsonLogs, Matchers.contains( assertThat(jsonLogs, contains(
logLine("file", Level.ERROR, "sample-name", "test", "This is an error message"), logLine("file", Level.ERROR, "sample-name", "test", "This is an error message"),
logLine("file", Level.WARN, "sample-name", "test", "This is a warning message"), logLine("file", Level.WARN, "sample-name", "test", "This is a warning message"),
logLine("file", Level.INFO, "sample-name", "test", "This is an info message"), logLine("file", Level.INFO, "sample-name", "test", "This is an info message"),
@ -99,8 +185,9 @@ public class JsonLoggerTests extends ESTestCase {
final Path path = clusterLogsPath(); final Path path = clusterLogsPath();
try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) { try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) {
List<JsonLogLine> jsonLogs = collectLines(stream); List<JsonLogLine> jsonLogs = collectLines(stream);
assertThat(jsonLogs, Matchers.contains( assertThat(jsonLogs, contains(
logLine("file", Level.INFO, "sample-name", "shardIdLogger", "[indexName][123] This is an info message with a shardId"), logLine("file", Level.INFO, "sample-name", "shardIdLogger",
"[indexName][123] This is an info message with a shardId"),
logLine("file", Level.INFO, "sample-name", "prefixLogger", "PREFIX This is an info message with a prefix") logLine("file", Level.INFO, "sample-name", "prefixLogger", "PREFIX This is an info message with a prefix")
)); ));
} }
@ -124,7 +211,7 @@ public class JsonLoggerTests extends ESTestCase {
final Path path = clusterLogsPath(); final Path path = clusterLogsPath();
try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) { try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) {
List<JsonLogLine> jsonLogs = collectLines(stream); List<JsonLogLine> jsonLogs = collectLines(stream);
assertThat(jsonLogs, Matchers.contains( assertThat(jsonLogs, contains(
logLine("file", Level.INFO, "sample-name", "test", json) logLine("file", Level.INFO, "sample-name", "test", json)
)); ));
} }
@ -137,8 +224,8 @@ public class JsonLoggerTests extends ESTestCase {
final Path path = clusterLogsPath(); final Path path = clusterLogsPath();
try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) { try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) {
List<JsonLogLine> jsonLogs = collectLines(stream); List<JsonLogLine> jsonLogs = collectLines(stream);
assertThat(jsonLogs, Matchers.contains( assertThat(jsonLogs, contains(
Matchers.allOf( allOf(
logLine("file", Level.ERROR, "sample-name", "test", "error message"), logLine("file", Level.ERROR, "sample-name", "test", "error message"),
stacktraceWith("java.lang.Exception: exception message"), stacktraceWith("java.lang.Exception: exception message"),
stacktraceWith("Caused by: java.lang.RuntimeException: cause message") stacktraceWith("Caused by: java.lang.RuntimeException: cause message")
@ -166,8 +253,8 @@ public class JsonLoggerTests extends ESTestCase {
try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) { try (Stream<JsonLogLine> stream = JsonLogsStream.from(path)) {
List<JsonLogLine> jsonLogs = collectLines(stream); List<JsonLogLine> jsonLogs = collectLines(stream);
assertThat(jsonLogs, Matchers.contains( assertThat(jsonLogs, contains(
Matchers.allOf( allOf(
//message field will have a single line with json escaped //message field will have a single line with json escaped
logLine("file", Level.ERROR, "sample-name", "test", "error message " + json), logLine("file", Level.ERROR, "sample-name", "test", "error message " + json),
@ -209,11 +296,11 @@ public class JsonLoggerTests extends ESTestCase {
@Override @Override
protected Boolean featureValueOf(JsonLogLine actual) { protected Boolean featureValueOf(JsonLogLine actual) {
return actual.type().equals(type) && return Objects.equals(actual.type(), type) &&
actual.level().equals(level.toString()) && Objects.equals(actual.level(), level.toString()) &&
actual.nodeName().equals(nodeName) && Objects.equals(actual.nodeName(), nodeName) &&
actual.component().equals(component) && Objects.equals(actual.component(), component) &&
actual.message().equals(message); Objects.equals(actual.message(), message);
} }
}; };
} }

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.logging;
/**
 * One-time test setup shared by JSON-logging test classes: configures the
 * node name used by the {@code %node_name} pattern converter exactly once,
 * no matter how many test classes call {@link #init()}.
 */
public class JsonLogsTestSetup {
    // guards against configuring the node name a second time
    private static boolean nodeNameConfigured = false;

    public static void init() {
        if (nodeNameConfigured) {
            return;
        }
        LogConfigurator.setNodeName("sample-name");
        nodeNameConfigured = true;
    }
}

View File

@ -9,6 +9,26 @@ appender.file.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.l
appender.file.layout.type = ESJsonLayout appender.file.layout.type = ESJsonLayout
appender.file.layout.type_name = file appender.file.layout.type_name = file
appender.deprecated.type = File
appender.deprecated.name = deprecated
appender.deprecated.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecated.json
appender.deprecated.layout.type = ESJsonLayout
appender.deprecated.layout.type_name = deprecated
appender.deprecated.layout.esmessagefields = x-opaque-id
appender.deprecatedconsole.type = Console
appender.deprecatedconsole.name = deprecatedconsole
appender.deprecatedconsole.layout.type = ESJsonLayout
appender.deprecatedconsole.layout.type_name = deprecated
appender.deprecatedconsole.layout.esmessagefields = x-opaque-id
appender.index_search_slowlog_rolling.type = File
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
.cluster_name}_index_search_slowlog.json
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
appender.index_search_slowlog_rolling.layout.esmessagefields=message,took,took_millis,total_hits,types,stats,search_type,total_shards,source,id
rootLogger.level = info rootLogger.level = info
rootLogger.appenderRef.console.ref = console rootLogger.appenderRef.console.ref = console
@ -18,4 +38,6 @@ logger.test.name = test
logger.test.level = trace logger.test.level = trace
logger.test.appenderRef.console.ref = console logger.test.appenderRef.console.ref = console
logger.test.appenderRef.file.ref = file logger.test.appenderRef.file.ref = file
logger.test.appenderRef.deprecated.ref = deprecated
logger.test.appenderRef.deprecatedconsole.ref = deprecatedconsole
logger.test.additivity = false logger.test.additivity = false

View File

@ -50,7 +50,7 @@ section in order to setup the same environment for each test section.
A `teardown` section contains a list of commands to run after each test A `teardown` section contains a list of commands to run after each test
section in order to setup the same environment for each test section. This section in order to setup the same environment for each test section. This
may be needed for modifications made by the testthat are not cleared by the may be needed for modifications made by the test that are not cleared by the
deletion of indices and templates. deletion of indices and templates.
A test section represents an independent test, containing multiple `do` A test section represents an independent test, containing multiple `do`

View File

@ -0,0 +1,45 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.logging;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.MapBuilder;
import java.util.Collections;
import java.util.Map;
/**
* A logger message used by {@link DeprecationLogger}.
* Carries x-opaque-id field if provided in the headers. Will populate the x-opaque-id field in JSON logs.
*/
/**
 * A logger message used by {@link DeprecationLogger}.
 * Carries x-opaque-id field if provided in the headers. Will populate the x-opaque-id field in JSON logs.
 */
public class DeprecatedMessage extends ESLogMessage {

    public DeprecatedMessage(String messagePattern, String xOpaqueId, Object... args) {
        super(fieldMap(xOpaqueId), messagePattern, args);
    }

    /**
     * Builds the field map for the JSON layout: a single immutable
     * {@code x-opaque-id} entry when an id was supplied, otherwise an
     * empty map so the field is omitted from the log line entirely.
     */
    private static Map<String, Object> fieldMap(String xOpaqueId) {
        return Strings.isNullOrEmpty(xOpaqueId)
            ? Collections.emptyMap()
            : Collections.singletonMap("x-opaque-id", xOpaqueId);
    }
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.Build;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.tasks.Task;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.security.AccessController; import java.security.AccessController;
@ -222,13 +223,14 @@ public class DeprecationLogger {
} }
void deprecated(final Set<ThreadContext> threadContexts, final String message, final boolean log, final Object... params) { void deprecated(final Set<ThreadContext> threadContexts, final String message, final boolean log, final Object... params) {
final Iterator<ThreadContext> iterator = threadContexts.iterator();
if (iterator.hasNext()) {
final String formattedMessage = LoggerMessageFormat.format(message, params); final String formattedMessage = LoggerMessageFormat.format(message, params);
final String warningHeaderValue = formatWarning(formattedMessage); final String warningHeaderValue = formatWarning(formattedMessage);
assert WARNING_HEADER_PATTERN.matcher(warningHeaderValue).matches(); assert WARNING_HEADER_PATTERN.matcher(warningHeaderValue).matches();
assert extractWarningValueFromWarningHeader(warningHeaderValue).equals(escapeAndEncode(formattedMessage)); assert extractWarningValueFromWarningHeader(warningHeaderValue).equals(escapeAndEncode(formattedMessage));
final Iterator<ThreadContext> iterator = threadContexts.iterator();
if (iterator.hasNext()) {
while (iterator.hasNext()) { while (iterator.hasNext()) {
try { try {
final ThreadContext next = iterator.next(); final ThreadContext next = iterator.next();
@ -244,7 +246,16 @@ public class DeprecationLogger {
@SuppressLoggerChecks(reason = "safely delegates to logger") @SuppressLoggerChecks(reason = "safely delegates to logger")
@Override @Override
public Void run() { public Void run() {
logger.warn(message, params); /**
* There should be only one threadContext (in prod env), @see DeprecationLogger#setThreadContext
*/
String opaqueId = threadContexts.stream()
.filter(t -> t.isClosed() == false)
.findFirst()
.map(t -> t.getHeader(Task.X_OPAQUE_ID))
.orElse("");
logger.warn(new DeprecatedMessage(message, opaqueId, params));
return null; return null;
} }
}); });

View File

@ -24,6 +24,7 @@ import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.Node; import org.apache.logging.log4j.core.config.Node;
import org.apache.logging.log4j.core.config.plugins.Plugin; import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute; import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginBuilderFactory;
import org.apache.logging.log4j.core.config.plugins.PluginFactory; import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.layout.AbstractStringLayout; import org.apache.logging.log4j.core.layout.AbstractStringLayout;
import org.apache.logging.log4j.core.layout.ByteBufferDestination; import org.apache.logging.log4j.core.layout.ByteBufferDestination;
@ -31,18 +32,19 @@ import org.apache.logging.log4j.core.layout.PatternLayout;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/** /**
* Formats log events as strings in a json format. * Formats log events as strings in a json format.
* <p> * <p>
* The class is wrapping the {@link PatternLayout} with a pattern to format into json. This gives more flexibility and control over how the * The class is wrapping the {@link PatternLayout} with a pattern to format into json. This gives more flexibility and control over how the
* log messages are formatted in {@link org.apache.logging.log4j.core.layout.JsonLayout} * log messages are formatted in {@link org.apache.logging.log4j.core.layout.JsonLayout}
*/ * There are fields which are always present in the log line:
@Plugin(name = "ESJsonLayout", category = Node.CATEGORY, elementType = Layout.ELEMENT_TYPE, printObject = true)
public class ESJsonLayout extends AbstractStringLayout {
/**
* Fields used in a pattern to format a json log line:
* <ul> * <ul>
* <li>type - the type of logs. These represent appenders and help docker distinguish log streams.</li> * <li>type - the type of logs. These represent appenders and help docker distinguish log streams.</li>
* <li>timestamp - ISO8601 with additional timezone ID</li> * <li>timestamp - ISO8601 with additional timezone ID</li>
@ -57,40 +59,156 @@ public class ESJsonLayout extends AbstractStringLayout {
* <li>exceptionAsJson - in json as a stacktrace field. Only present when throwable is passed as a parameter when using a logger. * <li>exceptionAsJson - in json as a stacktrace field. Only present when throwable is passed as a parameter when using a logger.
* Taken from JsonThrowablePatternConverter</li> * Taken from JsonThrowablePatternConverter</li>
* </ul> * </ul>
* <p>
* It is possible to add more or override them with <code>esmessagefield</code>
* <code>appender.logger.layout.esmessagefields=message,took,took_millis,total_hits,types,stats,search_type,total_shards,source,id</code>
* Each of these will be expanded into a json field with a value taken {@link ESLogMessage} field. In the example above
* <code>... "message": %ESMessageField{message}, "took": %ESMessageField{took} ...</code>
 * the message passed to a logger will be overridden with a value from %ESMessageField{message}
* <p>
* The value taken from %ESMessageField{message} has to be a simple escaped JSON value and is populated in subclasses of
* <code>ESLogMessage</code>
*/ */
private static final String PATTERN = "{" + @Plugin(name = "ESJsonLayout", category = Node.CATEGORY, elementType = Layout.ELEMENT_TYPE, printObject = true)
"\"type\": \"${TYPE}\", " + public class ESJsonLayout extends AbstractStringLayout {
"\"timestamp\": \"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZ}\", " +
"\"level\": \"%p\", " +
"\"component\": \"%c{1.}\", " +
"\"cluster.name\": \"${sys:es.logs.cluster_name}\", " +
"\"node.name\": \"%node_name\", " +
"%notEmpty{%node_and_cluster_id, } " +
"\"message\": \"%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}\" " +
"%exceptionAsJson " +
"}%n";
private final PatternLayout patternLayout; private final PatternLayout patternLayout;
protected ESJsonLayout(String typeName, Charset charset) { protected ESJsonLayout(String typeName, Charset charset, String[] esmessagefields) {
super(charset); super(charset);
this.patternLayout = PatternLayout.newBuilder() this.patternLayout = PatternLayout.newBuilder()
.withPattern(pattern(typeName)) .withPattern(pattern(typeName, esmessagefields))
.withAlwaysWriteExceptions(false) .withAlwaysWriteExceptions(false)
.build(); .build();
} }
private String pattern(String type) { private String pattern(String type, String[] esMessageFields) {
if (Strings.isEmpty(type)) { if (Strings.isEmpty(type)) {
throw new IllegalArgumentException("layout parameter 'type_name' cannot be empty"); throw new IllegalArgumentException("layout parameter 'type_name' cannot be empty");
} }
return PATTERN.replace("${TYPE}", type); Map<String, Object> map = new LinkedHashMap<>();
map.put("type", inQuotes(type));
map.put("timestamp", inQuotes("%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}"));
map.put("level", inQuotes("%p"));
map.put("component", inQuotes("%c{1.}"));
map.put("cluster.name", inQuotes("${sys:es.logs.cluster_name}"));
map.put("node.name", inQuotes("%node_name"));
map.put("message", inQuotes("%notEmpty{%enc{%marker}{JSON} }%enc{%.-10000m}{JSON}"));
for (String key : esMessageFields) {
map.put(key, inQuotes("%ESMessageField{" + key + "}"));
}
return createPattern(map, Stream.of(esMessageFields).collect(Collectors.toSet()));
}
private String createPattern(Map<String, Object> map, Set<String> esMessageFields) {
StringBuilder sb = new StringBuilder();
sb.append("{");
String separator = "";
for (Map.Entry<String, Object> entry : map.entrySet()) {
if (esMessageFields.contains(entry.getKey())) {
sb.append("%notEmpty{");
sb.append(separator);
appendField(sb, entry);
sb.append("}");
} else {
sb.append(separator);
appendField(sb, entry);
}
separator = ", ";
}
sb.append(notEmpty(", %node_and_cluster_id "));
sb.append("%exceptionAsJson ");
sb.append("}");
sb.append(System.lineSeparator());
return sb.toString();
}
private void appendField(StringBuilder sb, Map.Entry<String, Object> entry) {
sb.append(jsonKey(entry.getKey()));
sb.append(entry.getValue().toString());
}
private String notEmpty(String value) {
return "%notEmpty{" + value + "}";
}
private CharSequence jsonKey(String s) {
return inQuotes(s) + ": ";
}
private String inQuotes(String s) {
return "\"" + s + "\"";
} }
@PluginFactory @PluginFactory
public static ESJsonLayout createLayout(@PluginAttribute("type_name") String type, public static ESJsonLayout createLayout(String type,
@PluginAttribute(value = "charset", defaultString = "UTF-8") Charset charset) { Charset charset,
return new ESJsonLayout(type, charset); String[] esmessagefields) {
return new ESJsonLayout(type, charset, esmessagefields);
}
PatternLayout getPatternLayout() {
return patternLayout;
}
public static class Builder<B extends ESJsonLayout.Builder<B>> extends AbstractStringLayout.Builder<B>
implements org.apache.logging.log4j.core.util.Builder<ESJsonLayout> {
@PluginAttribute("type_name")
String type;
@PluginAttribute(value = "charset", defaultString = "UTF-8")
Charset charset;
@PluginAttribute("esmessagefields")
private String esMessageFields;
public Builder() {
setCharset(StandardCharsets.UTF_8);
}
@Override
public ESJsonLayout build() {
String[] split = Strings.isNullOrEmpty(esMessageFields) ? new String[]{} : esMessageFields.split(",");
return ESJsonLayout.createLayout(type, charset, split);
}
public Charset getCharset() {
return charset;
}
public B setCharset(final Charset charset) {
this.charset = charset;
return asBuilder();
}
public String getType() {
return type;
}
public B setType(final String type) {
this.type = type;
return asBuilder();
}
public String getESMessageFields() {
return esMessageFields;
}
public B setESMessageFields(String esmessagefields) {
this.esMessageFields = esmessagefields;
return asBuilder();
}
}
@PluginBuilderFactory
public static <B extends ESJsonLayout.Builder<B>> B newBuilder() {
return new ESJsonLayout.Builder<B>().asBuilder();
} }
@Override @Override

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.logging;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.SuppressLoggerChecks;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * A base class for custom log4j logger messages. Carries additional fields which will populate JSON fields in logs.
 */
public abstract class ESLogMessage extends ParameterizedMessage {
    // Extra key/value pairs rendered as top-level JSON fields by ESJsonLayout via %ESMessageField{key}.
    private final Map<String, Object> fields;

    /**
     * This is an abstract class, so this is safe. The check is done on DeprecationMessage.
     * Other subclasses are not allowing varargs
     *
     * @param fields         additional fields to expose in structured (JSON) log output
     * @param messagePattern the log4j parameterized message pattern used for plaintext logs
     * @param args           arguments substituted into {@code messagePattern}
     */
    @SuppressLoggerChecks(reason = "Safe as this is abstract class")
    public ESLogMessage(Map<String, Object> fields, String messagePattern, Object... args) {
        super(messagePattern, args);
        this.fields = fields;
    }

    /**
     * Returns the string form of the field stored under {@code key}, or {@code null} when the field is absent
     * or was stored as {@code null}.
     */
    public String getValueFor(String key) {
        Object value = fields.get(key);
        return value != null ? value.toString() : null;
    }

    /**
     * Wraps {@code s} in double quotes; a {@code null} argument yields an empty quoted string ({@code ""}).
     */
    public static String inQuotes(String s) {
        if (s == null) {
            return inQuotes("");
        }
        return "\"" + s + "\"";
    }

    /**
     * Wraps the {@code toString()} of {@code s} in double quotes; {@code null} yields an empty quoted string.
     */
    public static String inQuotes(Object s) {
        if (s == null) {
            return inQuotes("");
        }
        return inQuotes(s.toString());
    }

    /**
     * Renders the stream elements as a JSON array literal, e.g. {@code ["a", "b"]}.
     */
    public static String asJsonArray(Stream<String> stream) {
        return "[" + stream
            .map(ESLogMessage::inQuotes)
            .collect(Collectors.joining(", ")) + "]";
    }
}

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.logging;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.pattern.ConverterKeys;
import org.apache.logging.log4j.core.pattern.LogEventPatternConverter;
import org.apache.logging.log4j.core.pattern.PatternConverter;
import org.apache.logging.log4j.util.StringBuilders;
import org.elasticsearch.common.Strings;
/**
 * Pattern converter to populate ESMessageField in a pattern.
 * It will only populate these fields if the event's message is of type <code>ESLogMessage</code>.
 */
@Plugin(category = PatternConverter.CATEGORY, name = "ESMessageField")
@ConverterKeys({"ESMessageField"})
public final class ESMessageFieldConverter extends LogEventPatternConverter {

    // The field name looked up on the ESLogMessage; fixed for the lifetime of the converter.
    private final String key;

    /**
     * Called by log4j2 to initialize this converter.
     *
     * @param config  the current log4j configuration (unused)
     * @param options pattern options; the first entry is the ESLogMessage field key to render
     */
    public static ESMessageFieldConverter newInstance(final Configuration config, final String[] options) {
        final String key = options[0];
        return new ESMessageFieldConverter(key);
    }

    public ESMessageFieldConverter(String key) {
        super("ESMessageField", "ESMessageField");
        this.key = key;
    }

    /**
     * Appends the value of {@code key} from the event's {@link ESLogMessage}, if present and non-empty.
     */
    @Override
    public void format(LogEvent event, StringBuilder toAppendTo) {
        if (event.getMessage() instanceof ESLogMessage) {
            ESLogMessage logMessage = (ESLogMessage) event.getMessage();
            final String value = logMessage.getValueFor(key);
            if (Strings.isNullOrEmpty(value) == false) {
                StringBuilders.appendValue(toAppendTo, value);
                return;
            }
        }
        // Field absent or message is not an ESLogMessage: append an empty value so the surrounding
        // %notEmpty{...} wrapper in ESJsonLayout can drop the whole JSON field from the output line.
        StringBuilders.appendValue(toAppendTo, "");
    }
}

View File

@ -23,6 +23,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CloseableThreadLocal; import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.client.OriginSettingClient; import org.elasticsearch.client.OriginSettingClient;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
@ -30,6 +31,7 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.tasks.Task;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
@ -130,7 +132,20 @@ public final class ThreadContext implements Closeable, Writeable {
*/ */
public StoredContext stashContext() { public StoredContext stashContext() {
final ThreadContextStruct context = threadLocal.get(); final ThreadContextStruct context = threadLocal.get();
/**
* X-Opaque-ID should be preserved in a threadContext in order to propagate this across threads.
* This is needed so the DeprecationLogger in another thread can see the value of X-Opaque-ID provided by a user.
* Otherwise when context is stash, it should be empty.
*/
if (context.requestHeaders.containsKey(Task.X_OPAQUE_ID)) {
ThreadContextStruct threadContextStruct =
DEFAULT_CONTEXT.putHeaders(MapBuilder.<String, String>newMapBuilder()
.put(Task.X_OPAQUE_ID, context.requestHeaders.get(Task.X_OPAQUE_ID))
.immutableMap());
threadLocal.set(threadContextStruct);
} else {
threadLocal.set(null); threadLocal.set(null);
}
return () -> { return () -> {
// If the node and thus the threadLocal get closed while this task // If the node and thus the threadLocal get closed while this task
// is still executing, we don't want this runnable to fail with an // is still executing, we don't want this runnable to fail with an
@ -403,7 +418,7 @@ public final class ThreadContext implements Closeable, Writeable {
/** /**
* Returns <code>true</code> if the context is closed, otherwise <code>true</code> * Returns <code>true</code> if the context is closed, otherwise <code>true</code>
*/ */
boolean isClosed() { public boolean isClosed() {
return threadLocal.closed.get(); return threadLocal.closed.get();
} }

View File

@ -19,10 +19,12 @@
package org.elasticsearch.index; package org.elasticsearch.index;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.util.StringBuilders;
import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.ESLogMessage;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Setting.Property;
@ -35,7 +37,9 @@ import org.elasticsearch.index.shard.ShardId;
import java.io.IOException; import java.io.IOException;
import java.io.UncheckedIOException; import java.io.UncheckedIOException;
import java.util.HashMap;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
public final class IndexingSlowLog implements IndexingOperationListener { public final class IndexingSlowLog implements IndexingOperationListener {
@ -149,34 +153,58 @@ public final class IndexingSlowLog implements IndexingOperationListener {
final ParsedDocument doc = indexOperation.parsedDoc(); final ParsedDocument doc = indexOperation.parsedDoc();
final long tookInNanos = result.getTook(); final long tookInNanos = result.getTook();
if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold) { if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold) {
indexLogger.warn("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog)); indexLogger.warn( new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexInfoThreshold >= 0 && tookInNanos > indexInfoThreshold) { } else if (indexInfoThreshold >= 0 && tookInNanos > indexInfoThreshold) {
indexLogger.info("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog)); indexLogger.info(new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexDebugThreshold >= 0 && tookInNanos > indexDebugThreshold) { } else if (indexDebugThreshold >= 0 && tookInNanos > indexDebugThreshold) {
indexLogger.debug("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog)); indexLogger.debug(new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexTraceThreshold >= 0 && tookInNanos > indexTraceThreshold) { } else if (indexTraceThreshold >= 0 && tookInNanos > indexTraceThreshold) {
indexLogger.trace("{}", new SlowLogParsedDocumentPrinter(index, doc, tookInNanos, reformat, maxSourceCharsToLog)); indexLogger.trace( new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} }
} }
} }
static final class SlowLogParsedDocumentPrinter { static final class IndexingSlowLogMessage extends ESLogMessage {
private final ParsedDocument doc;
private final long tookInNanos;
private final boolean reformat;
private final int maxSourceCharsToLog;
private final Index index;
SlowLogParsedDocumentPrinter(Index index, ParsedDocument doc, long tookInNanos, boolean reformat, int maxSourceCharsToLog) { IndexingSlowLogMessage(Index index, ParsedDocument doc, long tookInNanos, boolean reformat, int maxSourceCharsToLog) {
this.doc = doc; super(prepareMap(index,doc,tookInNanos,reformat,maxSourceCharsToLog),
this.index = index; message(index,doc,tookInNanos,reformat,maxSourceCharsToLog));
this.tookInNanos = tookInNanos;
this.reformat = reformat;
this.maxSourceCharsToLog = maxSourceCharsToLog;
} }
@Override private static Map<String, Object> prepareMap(Index index, ParsedDocument doc, long tookInNanos, boolean reformat,
public String toString() { int maxSourceCharsToLog) {
Map<String,Object> map = new HashMap<>();
map.put("message", index);
map.put("took", TimeValue.timeValueNanos(tookInNanos));
map.put("took_millis", ""+TimeUnit.NANOSECONDS.toMillis(tookInNanos));
map.put("doc_type", doc.type());
map.put("id", doc.id());
map.put("routing", doc.routing());
if (maxSourceCharsToLog == 0 || doc.source() == null || doc.source().length() == 0) {
return map;
}
try {
String source = XContentHelper.convertToJson(doc.source(), reformat, doc.getXContentType());
String trim = Strings.cleanTruncate(source, maxSourceCharsToLog).trim();
StringBuilder sb = new StringBuilder(trim);
StringBuilders.escapeJson(sb,0);
map.put("source", sb.toString());
} catch (IOException e) {
StringBuilder sb = new StringBuilder("_failed_to_convert_[" + e.getMessage()+"]");
StringBuilders.escapeJson(sb,0);
map.put("source", sb.toString());
/*
* We choose to fail to write to the slow log and instead let this percolate up to the post index listener loop where this
* will be logged at the warn level.
*/
final String message = String.format(Locale.ROOT, "failed to convert source for slow log entry [%s]", map.toString());
throw new UncheckedIOException(message, e);
}
return map;
}
private static String message(Index index, ParsedDocument doc, long tookInNanos, boolean reformat, int maxSourceCharsToLog) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
sb.append(index).append(" "); sb.append(index).append(" ");
sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], "); sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], ");

View File

@ -19,9 +19,11 @@
package org.elasticsearch.index; package org.elasticsearch.index;
import org.apache.logging.log4j.Logger; import com.fasterxml.jackson.core.io.JsonStringEncoder;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.ESLogMessage;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Setting.Property;
@ -31,8 +33,12 @@ import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import java.nio.charset.Charset;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
public final class SearchSlowLog implements SearchOperationListener { public final class SearchSlowLog implements SearchOperationListener {
private long queryWarnThreshold; private long queryWarnThreshold;
@ -121,43 +127,69 @@ public final class SearchSlowLog implements SearchOperationListener {
Loggers.setLevel(queryLogger, level.name()); Loggers.setLevel(queryLogger, level.name());
Loggers.setLevel(fetchLogger, level.name()); Loggers.setLevel(fetchLogger, level.name());
} }
@Override @Override
public void onQueryPhase(SearchContext context, long tookInNanos) { public void onQueryPhase(SearchContext context, long tookInNanos) {
if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) { if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) {
queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); queryLogger.warn(new SearchSlowLogMessage(context, tookInNanos));
} else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) { } else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) {
queryLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); queryLogger.info(new SearchSlowLogMessage(context, tookInNanos));
} else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) { } else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) {
queryLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); queryLogger.debug(new SearchSlowLogMessage(context, tookInNanos));
} else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) { } else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) {
queryLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); queryLogger.trace(new SearchSlowLogMessage(context, tookInNanos));
} }
} }
@Override @Override
public void onFetchPhase(SearchContext context, long tookInNanos) { public void onFetchPhase(SearchContext context, long tookInNanos) {
if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) { if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) {
fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); fetchLogger.warn(new SearchSlowLogMessage(context, tookInNanos));
} else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) { } else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) {
fetchLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); fetchLogger.info(new SearchSlowLogMessage(context, tookInNanos));
} else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) { } else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) {
fetchLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); fetchLogger.debug(new SearchSlowLogMessage(context, tookInNanos));
} else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) { } else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) {
fetchLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos)); fetchLogger.trace(new SearchSlowLogMessage(context, tookInNanos));
} }
} }
static final class SlowLogSearchContextPrinter { static final class SearchSlowLogMessage extends ESLogMessage {
private final SearchContext context;
private final long tookInNanos;
SlowLogSearchContextPrinter(SearchContext context, long tookInNanos) { SearchSlowLogMessage(SearchContext context, long tookInNanos) {
this.context = context; super(prepareMap(context, tookInNanos), message(context, tookInNanos));
this.tookInNanos = tookInNanos;
} }
@Override private static Map<String, Object> prepareMap(SearchContext context, long tookInNanos) {
public String toString() { Map<String, Object> messageFields = new HashMap<>();
messageFields.put("message", context.indexShard().shardId());
messageFields.put("took", TimeValue.timeValueNanos(tookInNanos));
messageFields.put("took_millis", TimeUnit.NANOSECONDS.toMillis(tookInNanos));
if (context.queryResult().getTotalHits() != null) {
messageFields.put("total_hits", context.queryResult().getTotalHits());
} else {
messageFields.put("total_hits", "-1");
}
messageFields.put("stats", asJsonArray(context.groupStats() != null ? context.groupStats().stream() : Stream.empty()));
messageFields.put("search_type", context.searchType());
messageFields.put("total_shards", context.numberOfShards());
if (context.request().source() != null) {
byte[] sourceEscaped = JsonStringEncoder.getInstance()
.quoteAsUTF8(context.request().source().toString(FORMAT_PARAMS));
String source = new String(sourceEscaped, Charset.defaultCharset());
messageFields.put("source", source);
} else {
messageFields.put("source", "{}");
}
messageFields.put("id", context.getTask().getHeader(Task.X_OPAQUE_ID));
return messageFields;
}
// Message will be used in plaintext logs
private static String message(SearchContext context, long tookInNanos) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
sb.append(context.indexShard().shardId()) sb.append(context.indexShard().shardId())
.append(" ") .append(" ")

View File

@ -26,6 +26,7 @@ import org.apache.logging.log4j.simple.SimpleLoggerContextFactory;
import org.apache.logging.log4j.spi.ExtendedLogger; import org.apache.logging.log4j.spi.ExtendedLogger;
import org.apache.logging.log4j.spi.LoggerContext; import org.apache.logging.log4j.spi.LoggerContext;
import org.apache.logging.log4j.spi.LoggerContextFactory; import org.apache.logging.log4j.spi.LoggerContextFactory;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -56,6 +57,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.Is.is; import static org.hamcrest.core.Is.is;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@ -318,7 +320,7 @@ public class DeprecationLoggerTests extends ESTestCase {
assertTrue(warningHeadersSize <= 1024); assertTrue(warningHeadersSize <= 1024);
} }
} }
@SuppressLoggerChecks(reason = "Safe as this is using mockito")
public void testLogPermissions() { public void testLogPermissions() {
AtomicBoolean supplierCalled = new AtomicBoolean(false); AtomicBoolean supplierCalled = new AtomicBoolean(false);
@ -330,7 +332,7 @@ public class DeprecationLoggerTests extends ESTestCase {
supplierCalled.set(true); supplierCalled.set(true);
createTempDir(); // trigger file permission, like rolling logs would createTempDir(); // trigger file permission, like rolling logs would
return null; return null;
}).when(mockLogger).warn("foo", new Object[] {"bar"}); }).when(mockLogger).warn(new DeprecatedMessage("foo", any()));
final LoggerContext context = new SimpleLoggerContext() { final LoggerContext context = new SimpleLoggerContext() {
@Override @Override
public ExtendedLogger getLogger(String name) { public ExtendedLogger getLogger(String name) {

View File

@ -29,7 +29,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexingSlowLog.SlowLogParsedDocumentPrinter; import org.elasticsearch.index.IndexingSlowLog.IndexingSlowLogMessage;
import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -38,13 +38,39 @@ import java.io.IOException;
import java.io.UncheckedIOException; import java.io.UncheckedIOException;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.Matchers.startsWith;
public class IndexingSlowLogTests extends ESTestCase { public class IndexingSlowLogTests extends ESTestCase {
public void testSlowLogMessageHasJsonFields() throws IOException {
BytesReference source = BytesReference.bytes(JsonXContent.contentBuilder()
.startObject().field("foo", "bar").endObject());
ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id",
"test", "routingValue", null, source, XContentType.JSON, null);
Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[]
IndexingSlowLogMessage p = new IndexingSlowLogMessage(index, pd, 10, true, 0);
assertThat(p.getValueFor("message"),equalTo("[foo/123]"));
assertThat(p.getValueFor("took"),equalTo("10nanos"));
assertThat(p.getValueFor("took_millis"),equalTo("0"));
assertThat(p.getValueFor("doc_type"),equalTo("test"));
assertThat(p.getValueFor("id"),equalTo("id"));
assertThat(p.getValueFor("routing"),equalTo("routingValue"));
assertThat(p.getValueFor("source"), is(emptyOrNullString()));
// Turning on document logging logs the whole thing
p = new IndexingSlowLogMessage(index, pd, 10, true, Integer.MAX_VALUE);
assertThat(p.getValueFor("source"), containsString("{\\\"foo\\\":\\\"bar\\\"}"));
}
public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
BytesReference source = BytesReference.bytes(JsonXContent.contentBuilder() BytesReference source = BytesReference.bytes(JsonXContent.contentBuilder()
.startObject().field("foo", "bar").endObject()); .startObject().field("foo", "bar").endObject());
@ -53,32 +79,32 @@ public class IndexingSlowLogTests extends ESTestCase {
"test", null, null, source, XContentType.JSON, null); "test", null, null, source, XContentType.JSON, null);
Index index = new Index("foo", "123"); Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[] // Turning off document logging doesn't log source[]
SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 0); IndexingSlowLogMessage p = new IndexingSlowLogMessage(index, pd, 10, true, 0);
assertThat(p.toString(), not(containsString("source["))); assertThat(p.getFormattedMessage(), not(containsString("source[")));
// Turning on document logging logs the whole thing // Turning on document logging logs the whole thing
p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, Integer.MAX_VALUE); p = new IndexingSlowLogMessage(index, pd, 10, true, Integer.MAX_VALUE);
assertThat(p.toString(), containsString("source[{\"foo\":\"bar\"}]")); assertThat(p.getFormattedMessage(), containsString("source[{\"foo\":\"bar\"}]"));
// And you can truncate the source // And you can truncate the source
p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3); p = new IndexingSlowLogMessage(index, pd, 10, true, 3);
assertThat(p.toString(), containsString("source[{\"f]")); assertThat(p.getFormattedMessage(), containsString("source[{\"f]"));
// And you can truncate the source // And you can truncate the source
p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3); p = new IndexingSlowLogMessage(index, pd, 10, true, 3);
assertThat(p.toString(), containsString("source[{\"f]")); assertThat(p.getFormattedMessage(), containsString("source[{\"f]"));
assertThat(p.toString(), startsWith("[foo/123] took")); assertThat(p.getFormattedMessage(), startsWith("[foo/123] took"));
// Throwing a error if source cannot be converted // Throwing a error if source cannot be converted
source = new BytesArray("invalid"); source = new BytesArray("invalid");
pd = new ParsedDocument(new NumericDocValuesField("version", 1), ParsedDocument doc = new ParsedDocument(new NumericDocValuesField("version", 1),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id",
"test", null, null, source, XContentType.JSON, null); "test", null, null, source, XContentType.JSON, null);
p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3);
final UncheckedIOException e = expectThrows(UncheckedIOException.class, p::toString); final UncheckedIOException e = expectThrows(UncheckedIOException.class,
()->new IndexingSlowLogMessage(index, doc, 10, true, 3));
assertThat(e, hasToString(containsString("_failed_to_convert_[Unrecognized token 'invalid':" assertThat(e, hasToString(containsString("_failed_to_convert_[Unrecognized token 'invalid':"
+ " was expecting ('true', 'false' or 'null')\n" + " was expecting ('true', 'false' or 'null')\\n"
+ " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper"))); + " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper")));
assertNotNull(e.getCause()); assertNotNull(e.getCause());
assertThat(e.getCause(), instanceOf(JsonParseException.class)); assertThat(e.getCause(), instanceOf(JsonParseException.class));
@ -128,22 +154,6 @@ public class IndexingSlowLogTests extends ESTestCase {
assertTrue(log.isReformat()); assertTrue(log.isReformat());
} }
public void testReformatIsFalseAndSourceIsTrim() {
String json = "\n\n{ \"fieldName\": 123 } \n ";
BytesReference source = new BytesArray(json);
ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id",
"test", null, null, source, XContentType.JSON, null);
Index index = new Index("foo", "123");
// Turning off reformatting so the document is in logs as provided
SlowLogParsedDocumentPrinter p = new SlowLogParsedDocumentPrinter(index, pd, 10, false, 1000);
String logLine = p.toString();
//expect the new lines and white characters to be trimmed
assertThat(logLine, containsString("source[{"));
assertThat(logLine.split("\n").length, equalTo(1));
}
public void testLevelSetting() { public void testLevelSetting() {
SlowLogLevel level = randomFrom(SlowLogLevel.values()); SlowLogLevel level = randomFrom(SlowLogLevel.values());
IndexMetaData metaData = newIndexMeta("index", Settings.builder() IndexMetaData metaData = newIndexMeta("index", Settings.builder()

View File

@ -39,12 +39,14 @@ import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.TestSearchContext;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.Matchers;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
@ -155,6 +157,25 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
}; };
} }
public void testSlowLogHasJsonFields() throws IOException {
IndexService index = createIndex("foo");
SearchContext searchContext = createSearchContext(index);
SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
searchContext.request().source(source);
searchContext.setTask(new SearchTask(0, "n/a", "n/a", "test", null,
Collections.singletonMap(Task.X_OPAQUE_ID, "my_id")));
SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getValueFor("message"), equalTo("[foo][0]"));
assertThat(p.getValueFor("took"), equalTo("10nanos"));
assertThat(p.getValueFor("took_millis"), equalTo("0"));
assertThat(p.getValueFor("total_hits"), equalTo("-1"));
assertThat(p.getValueFor("stats"), equalTo("[]"));
assertThat(p.getValueFor("search_type"), Matchers.nullValue());
assertThat(p.getValueFor("total_shards"), equalTo("1"));
assertThat(p.getValueFor("source"), equalTo("{\\\"query\\\":{\\\"match_all\\\":{\\\"boost\\\":1.0}}}"));
}
public void testSlowLogSearchContextPrinterToLog() throws IOException { public void testSlowLogSearchContextPrinterToLog() throws IOException {
IndexService index = createIndex("foo"); IndexService index = createIndex("foo");
SearchContext searchContext = createSearchContext(index); SearchContext searchContext = createSearchContext(index);
@ -162,11 +183,11 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
searchContext.request().source(source); searchContext.request().source(source);
searchContext.setTask(new SearchTask(0, "n/a", "n/a", "test", null, searchContext.setTask(new SearchTask(0, "n/a", "n/a", "test", null,
Collections.singletonMap(Task.X_OPAQUE_ID, "my_id"))); Collections.singletonMap(Task.X_OPAQUE_ID, "my_id")));
SearchSlowLog.SlowLogSearchContextPrinter p = new SearchSlowLog.SlowLogSearchContextPrinter(searchContext, 10); SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.toString(), startsWith("[foo][0]")); assertThat(p.getFormattedMessage(), startsWith("[foo][0]"));
// Makes sure that output doesn't contain any new lines // Makes sure that output doesn't contain any new lines
assertThat(p.toString(), not(containsString("\n"))); assertThat(p.getFormattedMessage(), not(containsString("\n")));
assertThat(p.toString(), endsWith("id[my_id], ")); assertThat(p.getFormattedMessage(), endsWith("id[my_id], "));
} }
public void testLevelSetting() { public void testLevelSetting() {

View File

@ -30,7 +30,7 @@ import java.util.List;
* Parsing log lines with this class confirms the json format of logs * Parsing log lines with this class confirms the json format of logs
*/ */
public class JsonLogLine { public class JsonLogLine {
public static final ObjectParser<JsonLogLine, Void> PARSER = createParser(false); public static final ObjectParser<JsonLogLine, Void> PARSER = createParser(true);
private String type; private String type;
private String timestamp; private String timestamp;

View File

@ -30,6 +30,8 @@ import java.io.UncheckedIOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Spliterator; import java.util.Spliterator;
import java.util.Spliterators; import java.util.Spliterators;
import java.util.stream.Stream; import java.util.stream.Stream;
@ -57,12 +59,22 @@ public class JsonLogsStream {
return from(Files.newBufferedReader(path)); return from(Files.newBufferedReader(path));
} }
public static Stream<Map<String, String>> mapStreamFrom(Path path) throws IOException {
return new JsonLogsStream(Files.newBufferedReader(path)).streamMap();
}
private Stream<JsonLogLine> stream() { private Stream<JsonLogLine> stream() {
Spliterator<JsonLogLine> spliterator = Spliterators.spliteratorUnknownSize(new JsonIterator(), Spliterator.ORDERED); Spliterator<JsonLogLine> spliterator = Spliterators.spliteratorUnknownSize(new JsonIterator(), Spliterator.ORDERED);
return StreamSupport.stream(spliterator, false) return StreamSupport.stream(spliterator, false)
.onClose(this::close); .onClose(this::close);
} }
private Stream<Map<String, String>> streamMap() {
Spliterator<Map<String, String>> spliterator = Spliterators.spliteratorUnknownSize(new MapIterator(), Spliterator.ORDERED);
return StreamSupport.stream(spliterator, false)
.onClose(this::close);
}
private void close() { private void close() {
try { try {
parser.close(); parser.close();
@ -72,6 +84,26 @@ public class JsonLogsStream {
} }
} }
private class MapIterator implements Iterator<Map<String, String>> {
@Override
public boolean hasNext() {
return parser.isClosed() == false;
}
@Override
public Map<String, String> next() {
Map<String, String> map;
try {
map = parser.map(LinkedHashMap::new, XContentParser::text);
parser.nextToken();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return map;
}
}
private class JsonIterator implements Iterator<JsonLogLine> { private class JsonIterator implements Iterator<JsonLogLine> {
@Override @Override

View File

@ -52,21 +52,32 @@ import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.BasicFileAttributes;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class ESLoggerUsageChecker { public class ESLoggerUsageChecker {
public static final Type LOGGER_CLASS = Type.getType(Logger.class); public static final Type LOGGER_CLASS = Type.getType(Logger.class);
public static final Type THROWABLE_CLASS = Type.getType(Throwable.class); public static final Type THROWABLE_CLASS = Type.getType(Throwable.class);
public static final Type STRING_CLASS = Type.getType(String.class); public static final Type STRING_CLASS = Type.getType(String.class);
public static final Type STRING_ARRAY_CLASS = Type.getType(String[].class); public static final Type STRING_ARRAY_CLASS = Type.getType(String[].class);
public static final Type PARAMETERIZED_MESSAGE_CLASS = Type.getType(ParameterizedMessage.class);
public static final Type OBJECT_CLASS = Type.getType(Object.class); public static final Type OBJECT_CLASS = Type.getType(Object.class);
public static final Type OBJECT_ARRAY_CLASS = Type.getType(Object[].class); public static final Type OBJECT_ARRAY_CLASS = Type.getType(Object[].class);
public static final Type SUPPLIER_ARRAY_CLASS = Type.getType(Supplier[].class); public static final Type SUPPLIER_ARRAY_CLASS = Type.getType(Supplier[].class);
public static final Type MARKER_CLASS = Type.getType(Marker.class); public static final Type MARKER_CLASS = Type.getType(Marker.class);
public static final List<String> LOGGER_METHODS = Arrays.asList("trace", "debug", "info", "warn", "error", "fatal"); public static final List<String> LOGGER_METHODS = Arrays.asList("trace", "debug", "info", "warn", "error", "fatal");
public static final String IGNORE_CHECKS_ANNOTATION = "org.elasticsearch.common.SuppressLoggerChecks"; public static final String IGNORE_CHECKS_ANNOTATION = "org.elasticsearch.common.SuppressLoggerChecks";
// types which are subject to checking when used in logger. <code>TestMessage<code> is also declared here to
// make sure this functionality works
public static final Set<Type> DEPRECATED_TYPES = Stream.of(
Type.getObjectType("org/elasticsearch/common/logging/DeprecatedMessage"),
Type.getObjectType("org/elasticsearch/test/loggerusage/TestMessage")
).collect(Collectors.toSet());
public static final Type PARAMETERIZED_MESSAGE_CLASS = Type.getType(ParameterizedMessage.class);
@SuppressForbidden(reason = "command line tool") @SuppressForbidden(reason = "command line tool")
public static void main(String... args) throws Exception { public static void main(String... args) throws Exception {
@ -290,7 +301,17 @@ public class ESLoggerUsageChecker {
} }
} else if (insn.getOpcode() == Opcodes.INVOKESPECIAL) { // constructor invocation } else if (insn.getOpcode() == Opcodes.INVOKESPECIAL) { // constructor invocation
MethodInsnNode methodInsn = (MethodInsnNode) insn; MethodInsnNode methodInsn = (MethodInsnNode) insn;
if (Type.getObjectType(methodInsn.owner).equals(PARAMETERIZED_MESSAGE_CLASS)) { Type objectType = Type.getObjectType(methodInsn.owner);
if (DEPRECATED_TYPES.contains(objectType)) {
Type[] argumentTypes = Type.getArgumentTypes(methodInsn.desc);
if (argumentTypes.length == 3 &&
argumentTypes[0].equals(STRING_CLASS) &&
argumentTypes[1].equals(STRING_CLASS) &&
argumentTypes[2].equals(OBJECT_ARRAY_CLASS)) {
checkArrayArgs(methodNode, logMessageFrames[i], arraySizeFrames[i], lineNumber, methodInsn, 0, 2);
}
}else if (objectType.equals(PARAMETERIZED_MESSAGE_CLASS)) {
Type[] argumentTypes = Type.getArgumentTypes(methodInsn.desc); Type[] argumentTypes = Type.getArgumentTypes(methodInsn.desc);
if (argumentTypes.length == 2 && if (argumentTypes.length == 2 &&
argumentTypes[0].equals(STRING_CLASS) && argumentTypes[0].equals(STRING_CLASS) &&
@ -316,8 +337,10 @@ public class ESLoggerUsageChecker {
argumentTypes[2].equals(THROWABLE_CLASS)) { argumentTypes[2].equals(THROWABLE_CLASS)) {
checkArrayArgs(methodNode, logMessageFrames[i], arraySizeFrames[i], lineNumber, methodInsn, 0, 1); checkArrayArgs(methodNode, logMessageFrames[i], arraySizeFrames[i], lineNumber, methodInsn, 0, 1);
} else { } else {
throw new IllegalStateException("Constructor invoked on " + PARAMETERIZED_MESSAGE_CLASS.getClassName() + throw new IllegalStateException("Constructor invoked on " + objectType +
" that is not supported by logger usage checker"); " that is not supported by logger usage checker"+
new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber,
"Constructor: "+ Arrays.toString(argumentTypes)));
} }
} }
} }

View File

@ -116,6 +116,20 @@ public class ESLoggerUsageTests extends ESTestCase {
assertEquals(5, ParameterizedMessage.class.getConstructors().length); assertEquals(5, ParameterizedMessage.class.getConstructors().length);
} }
public void checkForSubclasses() {
logger.debug(new TestMessage("message", "x-opaque-id"));
}
public void checkArraySizeForSubclasses() {
logger.debug(new TestMessage("message {}", "x-opaque-id", 1));
}
public void checkFailArraySizeForSubclasses(Object... arr) {
logger.debug(new TestMessage("message {}", "x-opaque-id", arr));
}
public void checkFailArraySize(String... arr) {
logger.debug(new ParameterizedMessage("text {}", (Object[])arr));
}
public void checkNumberOfArguments1() { public void checkNumberOfArguments1() {
logger.info("Hello {}", "world"); logger.info("Hello {}", "world");

View File

@ -0,0 +1,33 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.loggerusage;
import org.apache.logging.log4j.message.ParameterizedMessage;
/**
* This class is for testing that <code>ESLoggerUsageChecker</code> can find incorrect usages of LogMessages
* which are subclasses of <code>ParametrizedMessage</code>
* @see ESLoggerUsageTests
*/
class TestMessage extends ParameterizedMessage {
TestMessage(String messagePattern, String xOpaqueId, Object... args) {
super(messagePattern, args);
}
}