SlowLoggers using single logger (#56708)

Slow loggers should use a single shared logger; otherwise, when an index is
deleted, the per-index log4j logger remains reachable (log4j caches loggers)
and creates a memory leak.

closes https://github.com/elastic/elasticsearch/issues/56171
This commit is contained in:
Przemyslaw Gomulka 2020-05-27 16:38:31 +02:00 committed by GitHub
parent ff63bda4be
commit 0e34b2f42e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 506 additions and 103 deletions

View File

@ -43,22 +43,7 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;
public final class IndexingSlowLog implements IndexingOperationListener {
private final Index index;
private boolean reformat;
private long indexWarnThreshold;
private long indexInfoThreshold;
private long indexDebugThreshold;
private long indexTraceThreshold;
/**
* How much of the source to log in the slowlog - 0 means log none and
* anything greater than 0 means log at least that many <em>characters</em>
* of the source.
*/
private int maxSourceCharsToLog;
private final Logger indexLogger;
private static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog";
public static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog";
public static final Setting<TimeValue> INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING =
Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1),
TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope);
@ -76,6 +61,22 @@ public final class IndexingSlowLog implements IndexingOperationListener {
public static final Setting<SlowLogLevel> INDEX_INDEXING_SLOWLOG_LEVEL_SETTING =
new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, Property.Dynamic,
Property.IndexScope);
private final Logger indexLogger;
private final Index index;
private boolean reformat;
private long indexWarnThreshold;
private long indexInfoThreshold;
private long indexDebugThreshold;
private long indexTraceThreshold;
/*
* How much of the source to log in the slowlog - 0 means log none and anything greater than 0 means log at least that many
* <em>characters</em> of the source.
*/
private int maxSourceCharsToLog;
private SlowLogLevel level;
/**
* Reads how much of the source to log. The user can specify any value they
* like and numbers are interpreted the maximum number of characters to log
@ -92,7 +93,8 @@ public final class IndexingSlowLog implements IndexingOperationListener {
}, Property.Dynamic, Property.IndexScope);
IndexingSlowLog(IndexSettings indexSettings) {
this.indexLogger = LogManager.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index." + indexSettings.getUUID());
this.indexLogger = LogManager.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index");
Loggers.setLevel(this.indexLogger, SlowLogLevel.TRACE.name());
this.index = indexSettings.getIndex();
indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat);
@ -121,7 +123,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
}
private void setLevel(SlowLogLevel level) {
Loggers.setLevel(this.indexLogger, level.name());
this.level = level;
}
private void setWarnThreshold(TimeValue warnThreshold) {
@ -149,13 +151,14 @@ public final class IndexingSlowLog implements IndexingOperationListener {
if (result.getResultType() == Engine.Result.Type.SUCCESS) {
final ParsedDocument doc = indexOperation.parsedDoc();
final long tookInNanos = result.getTook();
if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold) {
// when logger level is more specific than WARN AND event is within threshold it should be logged
if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold && level.isLevelEnabledFor(SlowLogLevel.WARN)) {
indexLogger.warn( new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexInfoThreshold >= 0 && tookInNanos > indexInfoThreshold) {
} else if (indexInfoThreshold >= 0 && tookInNanos > indexInfoThreshold && level.isLevelEnabledFor(SlowLogLevel.INFO)) {
indexLogger.info(new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexDebugThreshold >= 0 && tookInNanos > indexDebugThreshold) {
} else if (indexDebugThreshold >= 0 && tookInNanos > indexDebugThreshold && level.isLevelEnabledFor(SlowLogLevel.DEBUG)) {
indexLogger.debug(new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
} else if (indexTraceThreshold >= 0 && tookInNanos > indexTraceThreshold) {
} else if (indexTraceThreshold >= 0 && tookInNanos > indexTraceThreshold && level.isLevelEnabledFor(SlowLogLevel.TRACE)) {
indexLogger.trace( new IndexingSlowLogMessage(index, doc, tookInNanos, reformat, maxSourceCharsToLog));
}
}
@ -258,7 +261,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
}
SlowLogLevel getLevel() {
return SlowLogLevel.parse(indexLogger.getLevel().name());
return level;
}
}

View File

@ -33,8 +33,8 @@ import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.tasks.Task;
import java.util.Arrays;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -57,7 +57,7 @@ public final class SearchSlowLog implements SearchOperationListener {
private final Logger queryLogger;
private final Logger fetchLogger;
private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog";
static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog";
public static final Setting<TimeValue> INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING =
Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn", TimeValue.timeValueNanos(-1),
TimeValue.timeValueMillis(-1), Property.Dynamic, Property.IndexScope);
@ -87,11 +87,13 @@ public final class SearchSlowLog implements SearchOperationListener {
Property.IndexScope);
private static final ToXContent.Params FORMAT_PARAMS = new ToXContent.MapParams(Collections.singletonMap("pretty", "false"));
private SlowLogLevel level;
public SearchSlowLog(IndexSettings indexSettings) {
this.queryLogger = LogManager.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query." + indexSettings.getUUID());
this.fetchLogger = LogManager.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch." + indexSettings.getUUID());
this.queryLogger = LogManager.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query");
this.fetchLogger = LogManager.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch");
Loggers.setLevel(this.fetchLogger, SlowLogLevel.TRACE.name());
Loggers.setLevel(this.queryLogger, SlowLogLevel.TRACE.name());
indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING,
this::setQueryWarnThreshold);
@ -124,32 +126,31 @@ public final class SearchSlowLog implements SearchOperationListener {
}
private void setLevel(SlowLogLevel level) {
Loggers.setLevel(queryLogger, level.name());
Loggers.setLevel(fetchLogger, level.name());
this.level = level;
}
@Override
public void onQueryPhase(SearchContext context, long tookInNanos) {
if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) {
if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold && level.isLevelEnabledFor(SlowLogLevel.WARN)) {
queryLogger.warn(new SearchSlowLogMessage(context, tookInNanos));
} else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) {
} else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold && level.isLevelEnabledFor(SlowLogLevel.INFO)) {
queryLogger.info(new SearchSlowLogMessage(context, tookInNanos));
} else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) {
} else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold && level.isLevelEnabledFor(SlowLogLevel.DEBUG)) {
queryLogger.debug(new SearchSlowLogMessage(context, tookInNanos));
} else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) {
} else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold && level.isLevelEnabledFor(SlowLogLevel.TRACE)) {
queryLogger.trace(new SearchSlowLogMessage(context, tookInNanos));
}
}
@Override
public void onFetchPhase(SearchContext context, long tookInNanos) {
if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) {
if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold && level.isLevelEnabledFor(SlowLogLevel.WARN)) {
fetchLogger.warn(new SearchSlowLogMessage(context, tookInNanos));
} else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) {
} else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold && level.isLevelEnabledFor(SlowLogLevel.INFO)) {
fetchLogger.info(new SearchSlowLogMessage(context, tookInNanos));
} else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) {
} else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold && level.isLevelEnabledFor(SlowLogLevel.DEBUG)) {
fetchLogger.debug(new SearchSlowLogMessage(context, tookInNanos));
} else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) {
} else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold && level.isLevelEnabledFor(SlowLogLevel.TRACE)) {
fetchLogger.trace(new SearchSlowLogMessage(context, tookInNanos));
}
}
@ -303,7 +304,6 @@ public final class SearchSlowLog implements SearchOperationListener {
}
SlowLogLevel getLevel() {
assert queryLogger.getLevel().equals(fetchLogger.getLevel());
return SlowLogLevel.parse(queryLogger.getLevel().name());
return level;
}
}

View File

@ -21,8 +21,23 @@ package org.elasticsearch.index;
import java.util.Locale;
/**
 * The level configured for an index slow log. Each constant carries a numeric
 * specificity: WARN is the most specific (emits the least), TRACE the least
 * specific (emits the most). The configured level gates which slow-log events
 * are actually written via {@link #isLevelEnabledFor(SlowLogLevel)}.
 */
public enum SlowLogLevel {
    WARN(3), // most specific - little logging
    INFO(2),
    DEBUG(1),
    TRACE(0); // least specific - lots of logging

    /** Rank used to compare levels; higher means more specific. */
    private final int specificity;

    SlowLogLevel(int specificity) {
        this.specificity = specificity;
    }

    /**
     * Parses a level name case-insensitively into the matching constant,
     * using a locale-independent upper-casing so parsing is not affected by
     * the JVM default locale.
     *
     * @throws IllegalArgumentException if {@code level} matches no constant
     */
    public static SlowLogLevel parse(String level) {
        return valueOf(level.toUpperCase(Locale.ROOT));
    }

    /**
     * Returns {@code true} when, with {@code this} as the configured slow-log
     * level, an event logged at {@code levelToBeUsed} should be emitted.
     */
    boolean isLevelEnabledFor(SlowLogLevel levelToBeUsed) {
        // example: this.info(2) tries to log with levelToBeUsed.warn(3) - should allow
        return this.specificity <= levelToBeUsed.specificity;
    }
}

View File

@ -22,9 +22,6 @@ package org.elasticsearch.common.logging;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.test.ESTestCase;
@ -38,23 +35,6 @@ import static org.hamcrest.Matchers.nullValue;
public class LoggersTests extends ESTestCase {
static class MockAppender extends AbstractAppender {
private LogEvent lastEvent;
MockAppender(final String name) throws IllegalAccessException {
super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null);
}
@Override
public void append(LogEvent event) {
lastEvent = event.toImmutable();
}
ParameterizedMessage lastParameterizedMessage() {
return (ParameterizedMessage) lastEvent.getMessage();
}
}
public void testParameterizedMessageLambda() throws Exception {
final MockAppender appender = new MockAppender("trace_appender");
appender.start();

View File

@ -0,0 +1,48 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.logging;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.apache.logging.log4j.message.ParameterizedMessage;
/**
 * Test-only log4j appender that captures the most recently appended
 * {@link LogEvent} so tests can assert on what was (or was not) logged.
 * Shared across test classes; consume events via {@link #getLastEventAndReset()}.
 */
public class MockAppender extends AbstractAppender {
// Most recently appended event; null until something is logged or after
// getLastEventAndReset() has consumed it.
public LogEvent lastEvent;
public MockAppender(final String name) throws IllegalAccessException {
// Regex filter that accepts any message, including multi-line ones.
super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null);
}
@Override
public void append(LogEvent event) {
// Snapshot the event — toImmutable() guards against log4j mutating or
// reusing the original event object after this call returns.
lastEvent = event.toImmutable();
}
// Convenience accessor; assumes the last event's message was a
// ParameterizedMessage (throws ClassCastException otherwise).
ParameterizedMessage lastParameterizedMessage() {
return (ParameterizedMessage) lastEvent.getMessage();
}
/**
 * Returns the captured event (or {@code null} if nothing was logged) and
 * clears it so the next assertion starts from a clean slate.
 */
public LogEvent getLastEventAndReset() {
LogEvent toReturn = lastEvent;
lastEvent = null;
return toReturn;
}
}

View File

@ -20,20 +20,33 @@
package org.elasticsearch.index;
import com.fasterxml.jackson.core.JsonParseException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.Term;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.logging.MockAppender;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexingSlowLog.IndexingSlowLogMessage;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.InternalEngineTests;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.mockito.Mockito;
import java.io.IOException;
import java.io.UncheckedIOException;
@ -48,6 +61,141 @@ import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
public class IndexingSlowLogTests extends ESTestCase {
static MockAppender appender;
static Logger testLogger1 = LogManager.getLogger(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_PREFIX + ".index");
@BeforeClass
public static void init() throws IllegalAccessException {
appender = new MockAppender("trace_appender");
appender.start();
Loggers.addAppender(testLogger1, appender);
}
@AfterClass
public static void cleanup() {
appender.stop();
Loggers.removeAppender(testLogger1, appender);
}
// Verifies that the configured slow-log level gates logging on the shared
// logger: for each level (WARN/INFO/DEBUG/TRACE) an operation just under the
// matching threshold must not be logged, while one just over it must be.
// Thresholds come from createIndexMetadata: trace=10, debug=20, info=30,
// warn=40 (nanos).
public void testLevelPrecedence() {
String uuid = UUIDs.randomBase64UUID();
IndexMetadata metadata = createIndexMetadata(SlowLogLevel.WARN, "index-precedence", uuid);
IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY);
IndexingSlowLog log = new IndexingSlowLog(settings);
ParsedDocument doc = InternalEngineTests.createParsedDoc("1", null);
Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId("doc_id")), randomNonNegativeLong(), doc);
// Mock the result so the test controls both the outcome type and the took-time.
Engine.IndexResult result = Mockito.mock(Engine.IndexResult.class);//(0, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, false);
Mockito.when(result.getResultType()).thenReturn(Engine.Result.Type.SUCCESS);
{
//level set to WARN, should only log when WARN limit is breached
Mockito.when(result.getTook()).thenReturn(40L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNull(appender.getLastEventAndReset());
Mockito.when(result.getTook()).thenReturn(41L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNotNull(appender.getLastEventAndReset());
}
{
// level set INFO, should log when INFO level is breached
settings.updateIndexMetadata(createIndexMetadata(SlowLogLevel.INFO, "index", uuid));
Mockito.when(result.getTook()).thenReturn(30L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNull(appender.getLastEventAndReset());
Mockito.when(result.getTook()).thenReturn(31L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNotNull(appender.getLastEventAndReset());
}
{
// level set DEBUG, should log when DEBUG level is breached
settings.updateIndexMetadata(createIndexMetadata(SlowLogLevel.DEBUG, "index", uuid));
Mockito.when(result.getTook()).thenReturn(20L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNull(appender.getLastEventAndReset());
Mockito.when(result.getTook()).thenReturn(21L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNotNull(appender.getLastEventAndReset());
}
{
// level set TRACE, should log when TRACE level is breached
settings.updateIndexMetadata(createIndexMetadata(SlowLogLevel.TRACE, "index", uuid));
Mockito.when(result.getTook()).thenReturn(10L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNull(appender.getLastEventAndReset());
Mockito.when(result.getTook()).thenReturn(11L);
log.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNotNull(appender.getLastEventAndReset());
}
}
public void testTwoLoggersDifferentLevel() {
IndexSettings index1Settings = new IndexSettings(createIndexMetadata(SlowLogLevel.WARN, "index1", UUIDs.randomBase64UUID()),
Settings.EMPTY);
IndexingSlowLog log1 = new IndexingSlowLog(index1Settings);
IndexSettings index2Settings = new IndexSettings(createIndexMetadata(SlowLogLevel.TRACE, "index2", UUIDs.randomBase64UUID()),
Settings.EMPTY);
IndexingSlowLog log2 = new IndexingSlowLog(index2Settings);
ParsedDocument doc = InternalEngineTests.createParsedDoc("1", null);
Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId("doc_id")), randomNonNegativeLong(), doc);
Engine.IndexResult result = Mockito.mock(Engine.IndexResult.class);
Mockito.when(result.getResultType()).thenReturn(Engine.Result.Type.SUCCESS);
{
// level set WARN, should not log
Mockito.when(result.getTook()).thenReturn(11L);
log1.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNull(appender.getLastEventAndReset());
// level set TRACE, should log
log2.postIndex(ShardId.fromString("[index][123]"), index, result);
assertNotNull(appender.getLastEventAndReset());
}
}
public void testMultipleSlowLoggersUseSingleLog4jLogger() {
LoggerContext context = (LoggerContext) LogManager.getContext(false);
IndexSettings index1Settings = new IndexSettings(createIndexMetadata(SlowLogLevel.WARN, "index1", UUIDs.randomBase64UUID()),
Settings.EMPTY);
IndexingSlowLog log1 = new IndexingSlowLog(index1Settings);
int numberOfLoggersBefore = context.getLoggers().size();
IndexSettings index2Settings = new IndexSettings(createIndexMetadata(SlowLogLevel.TRACE, "index2", UUIDs.randomBase64UUID()),
Settings.EMPTY);
IndexingSlowLog log2 = new IndexingSlowLog(index2Settings);
context = (LoggerContext) LogManager.getContext(false);
int numberOfLoggersAfter = context.getLoggers().size();
assertThat(numberOfLoggersAfter, equalTo(numberOfLoggersBefore));
}
// Builds index metadata with the given slow-log level and fixed nanosecond
// thresholds (trace=10, debug=20, info=30, warn=40) so tests can trigger a
// specific slow-log tier by choosing a took-time just above/below a bound.
private IndexMetadata createIndexMetadata(SlowLogLevel level, String index, String uuid) {
return newIndexMeta(index, Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_INDEX_UUID, uuid)
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level)
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "10nanos")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "20nanos")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "30nanos")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "40nanos")
.build());
}
public void testSlowLogMessageHasJsonFields() throws IOException {
BytesReference source = BytesReference.bytes(JsonXContent.contentBuilder()
@ -59,12 +207,12 @@ public class IndexingSlowLogTests extends ESTestCase {
// Turning off document logging doesn't log source[]
IndexingSlowLogMessage p = new IndexingSlowLogMessage(index, pd, 10, true, 0);
assertThat(p.getValueFor("message"),equalTo("[foo/123]"));
assertThat(p.getValueFor("took"),equalTo("10nanos"));
assertThat(p.getValueFor("took_millis"),equalTo("0"));
assertThat(p.getValueFor("doc_type"),equalTo("test"));
assertThat(p.getValueFor("id"),equalTo("id"));
assertThat(p.getValueFor("routing"),equalTo("routingValue"));
assertThat(p.getValueFor("message"), equalTo("[foo/123]"));
assertThat(p.getValueFor("took"), equalTo("10nanos"));
assertThat(p.getValueFor("took_millis"), equalTo("0"));
assertThat(p.getValueFor("doc_type"), equalTo("test"));
assertThat(p.getValueFor("id"), equalTo("id"));
assertThat(p.getValueFor("routing"), equalTo("routingValue"));
assertThat(p.getValueFor("source"), is(emptyOrNullString()));
// Turning on document logging logs the whole thing
@ -77,7 +225,7 @@ public class IndexingSlowLogTests extends ESTestCase {
.startObject().field("foo", "bar").endObject());
ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id",
"test", null, null, source, XContentType.JSON, null);
"test", null, null, source, XContentType.JSON, null);
Index index = new Index("foo", "123");
// Turning off document logging doesn't log source[]
IndexingSlowLogMessage p = new IndexingSlowLogMessage(index, pd, 10, true, 0);
@ -103,15 +251,15 @@ public class IndexingSlowLogTests extends ESTestCase {
"test", null, null, source, XContentType.JSON, null);
final UncheckedIOException e = expectThrows(UncheckedIOException.class,
()->new IndexingSlowLogMessage(index, doc, 10, true, 3));
() -> new IndexingSlowLogMessage(index, doc, 10, true, 3));
assertThat(e, hasToString(containsString("_failed_to_convert_[Unrecognized token 'invalid':"
+ " was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\\n"
+ " at [Source: (org.elasticsearch.common.bytes.AbstractBytesReference$MarkSupportingStreamInputWrapper)")));
assertNotNull(e.getCause());
assertThat(e.getCause(), instanceOf(JsonParseException.class));
assertThat(e.getCause(), hasToString(containsString("Unrecognized token 'invalid':"
+ " was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\n"
+ " at [Source: (org.elasticsearch.common.bytes.AbstractBytesReference$MarkSupportingStreamInputWrapper)")));
+ " was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\n"
+ " at [Source: (org.elasticsearch.common.bytes.AbstractBytesReference$MarkSupportingStreamInputWrapper)")));
}
public void testReformatSetting() {
@ -238,9 +386,9 @@ public class IndexingSlowLogTests extends ESTestCase {
settings.updateIndexMetadata(newIndexMeta("index",
Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "120ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "220ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "320ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "420ms").build()));
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "220ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "320ms")
.put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "420ms").build()));
assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getIndexTraceThreshold());
@ -308,7 +456,7 @@ public class IndexingSlowLogTests extends ESTestCase {
assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
final IllegalArgumentException cause = (IllegalArgumentException) e.getCause();
final String causeExpected =
"failed to parse setting [" + key + "] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized";
"failed to parse setting [" + key + "] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized";
assertThat(cause, hasToString(containsString(causeExpected)));
}

View File

@ -19,10 +19,15 @@
package org.elasticsearch.index;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LoggerContext;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchShardTask;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.logging.MockAppender;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
@ -35,6 +40,8 @@ import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.TestSearchContext;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Arrays;
@ -50,12 +57,31 @@ import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
public class SearchSlowLogTests extends ESSingleNodeTestCase {
@Override
protected SearchContext createSearchContext(IndexService indexService) {
return createSearchContext(indexService, new String[]{});
static MockAppender appender;
static Logger queryLog = LogManager.getLogger(SearchSlowLog.INDEX_SEARCH_SLOWLOG_PREFIX + ".query");
static Logger fetchLog = LogManager.getLogger(SearchSlowLog.INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch");
@BeforeClass
public static void init() throws IllegalAccessException {
appender = new MockAppender("trace_appender");
appender.start();
Loggers.addAppender(queryLog, appender);
Loggers.addAppender(fetchLog, appender);
}
protected SearchContext createSearchContext(IndexService indexService, String ... groupStats) {
@AfterClass
public static void cleanup() {
appender.stop();
Loggers.removeAppender(queryLog, appender);
Loggers.removeAppender(fetchLog, appender);
}
@Override
protected SearchContext createSearchContext(IndexService indexService) {
return createSearchContext(indexService, new String[]{});
}
protected SearchContext createSearchContext(IndexService indexService, String... groupStats) {
BigArrays bigArrays = indexService.getBigArrays();
final ShardSearchRequest request =
new ShardSearchRequest(new ShardId(indexService.index(), 0), new String[0], 0L, null);
@ -69,16 +95,141 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
public ShardSearchRequest request() {
return request;
}
@Override
public SearchShardTask getTask() {
return super.getTask();
}
};
}
public void testLevelPrecedence() {
SearchContext ctx = searchContextWithSourceAndTask(createIndex("index"));
String uuid = UUIDs.randomBase64UUID();
IndexSettings settings =
new IndexSettings(createIndexMetadata(SlowLogLevel.WARN, "index", uuid), Settings.EMPTY);
SearchSlowLog log = new SearchSlowLog(settings);
{
//level set to WARN, should only log when WARN limit is breached
log.onQueryPhase(ctx, 40L);
assertNull(appender.getLastEventAndReset());
log.onQueryPhase(ctx, 41L);
assertNotNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 40L);
assertNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 41L);
assertNotNull(appender.getLastEventAndReset());
}
{
// level set INFO, should log when INFO level is breached
settings.updateIndexMetadata(createIndexMetadata(SlowLogLevel.INFO, "index", uuid));
log.onQueryPhase(ctx, 30L);
assertNull(appender.getLastEventAndReset());
log.onQueryPhase(ctx, 31L);
assertNotNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 30L);
assertNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 31L);
assertNotNull(appender.getLastEventAndReset());
}
{
// level set DEBUG, should log when DEBUG level is breached
settings.updateIndexMetadata(createIndexMetadata(SlowLogLevel.DEBUG, "index", uuid));
log.onQueryPhase(ctx, 20L);
assertNull(appender.getLastEventAndReset());
log.onQueryPhase(ctx, 21L);
assertNotNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 20L);
assertNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 21L);
assertNotNull(appender.getLastEventAndReset());
}
{
// level set TRACE, should log when TRACE level is breached
settings.updateIndexMetadata(createIndexMetadata(SlowLogLevel.TRACE, "index", uuid));
log.onQueryPhase(ctx, 10L);
assertNull(appender.getLastEventAndReset());
log.onQueryPhase(ctx, 11L);
assertNotNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 10L);
assertNull(appender.getLastEventAndReset());
log.onFetchPhase(ctx, 11L);
assertNotNull(appender.getLastEventAndReset());
}
}
public void testTwoLoggersDifferentLevel() {
SearchContext ctx1 = searchContextWithSourceAndTask(createIndex("index-1"));
SearchContext ctx2 = searchContextWithSourceAndTask(createIndex("index-2"));
IndexSettings settings1 =
new IndexSettings(createIndexMetadata(SlowLogLevel.WARN, "index-1", UUIDs.randomBase64UUID()), Settings.EMPTY);
SearchSlowLog log1 = new SearchSlowLog(settings1);
IndexSettings settings2 =
new IndexSettings(createIndexMetadata(SlowLogLevel.TRACE, "index-2", UUIDs.randomBase64UUID()), Settings.EMPTY);
SearchSlowLog log2 = new SearchSlowLog(settings2);
{
// level set WARN, should not log
log1.onQueryPhase(ctx1, 11L);
assertNull(appender.getLastEventAndReset());
log1.onFetchPhase(ctx1, 11L);
assertNull(appender.getLastEventAndReset());
// level set TRACE, should log
log2.onQueryPhase(ctx2, 11L);
assertNotNull(appender.getLastEventAndReset());
log2.onFetchPhase(ctx2, 11L);
assertNotNull(appender.getLastEventAndReset());
}
}
public void testMultipleSlowLoggersUseSingleLog4jLogger() {
LoggerContext context = (LoggerContext) LogManager.getContext(false);
SearchContext ctx1 = searchContextWithSourceAndTask(createIndex("index-1"));
IndexSettings settings1 =
new IndexSettings(createIndexMetadata(SlowLogLevel.WARN, "index-1", UUIDs.randomBase64UUID()), Settings.EMPTY);
SearchSlowLog log1 = new SearchSlowLog(settings1);
int numberOfLoggersBefore = context.getLoggers().size();
SearchContext ctx2 = searchContextWithSourceAndTask(createIndex("index-2"));
IndexSettings settings2 =
new IndexSettings(createIndexMetadata(SlowLogLevel.TRACE, "index-2", UUIDs.randomBase64UUID()), Settings.EMPTY);
SearchSlowLog log2 = new SearchSlowLog(settings2);
int numberOfLoggersAfter = context.getLoggers().size();
assertThat(numberOfLoggersAfter, equalTo(numberOfLoggersBefore));
}
private IndexMetadata createIndexMetadata(SlowLogLevel level, String index, String uuid) {
return newIndexMeta(index, Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_INDEX_UUID, uuid)
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level)
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "10nanos")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "20nanos")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "30nanos")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "40nanos")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "10nanos")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "20nanos")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "30nanos")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "40nanos")
.build());
}
public void testSlowLogHasJsonFields() throws IOException {
IndexService index = createIndex("foo");
SearchContext searchContext = createSearchContext(index);
SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
searchContext.request().source(source);
searchContext.setTask(new SearchShardTask(0, "n/a", "n/a", "test", null,
Collections.singletonMap(Task.X_OPAQUE_ID, "my_id")));
SearchContext searchContext = searchContextWithSourceAndTask(index);
SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getValueFor("message"), equalTo("[foo][0]"));
@ -94,18 +245,14 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
public void testSlowLogWithTypes() throws IOException {
IndexService index = createIndex("foo");
SearchContext searchContext = createSearchContext(index);
SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
searchContext.request().source(source);
searchContext.setTask(new SearchShardTask(0, "n/a", "n/a", "test", null,
Collections.singletonMap(Task.X_OPAQUE_ID, "my_id")));
SearchContext searchContext = searchContextWithSourceAndTask(index);
searchContext.getQueryShardContext().setTypes("type1", "type2");
SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\", \\\"type2\\\"]"));
searchContext.getQueryShardContext().setTypes("type1");
p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\"]"));
searchContext.getQueryShardContext().setTypes();
@ -115,7 +262,7 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
public void testSlowLogsWithStats() throws IOException {
IndexService index = createIndex("foo");
SearchContext searchContext = createSearchContext(index,"group1");
SearchContext searchContext = createSearchContext(index, "group1");
SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
searchContext.request().source(source);
searchContext.setTask(new SearchShardTask(0, "n/a", "n/a", "test", null,
@ -135,11 +282,7 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
public void testSlowLogSearchContextPrinterToLog() throws IOException {
IndexService index = createIndex("foo");
SearchContext searchContext = createSearchContext(index);
SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
searchContext.request().source(source);
searchContext.setTask(new SearchShardTask(0, "n/a", "n/a", "test", null,
Collections.singletonMap(Task.X_OPAQUE_ID, "my_id")));
SearchContext searchContext = searchContextWithSourceAndTask(index);
SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getFormattedMessage(), startsWith("[foo][0]"));
// Makes sure that output doesn't contain any new lines
@ -229,9 +372,9 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
settings.updateIndexMetadata(newIndexMeta("index",
Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "120ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "220ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "320ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "420ms").build()));
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "220ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "320ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "420ms").build()));
assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getQueryTraceThreshold());
@ -309,9 +452,9 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
settings.updateIndexMetadata(newIndexMeta("index",
Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "120ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "220ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "320ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "420ms").build()));
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "220ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "320ms")
.put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "420ms").build()));
assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getFetchTraceThreshold());
@ -379,7 +522,7 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
final IllegalArgumentException cause = (IllegalArgumentException) e.getCause();
final String causeExpected =
"failed to parse setting [" + key + "] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized";
"failed to parse setting [" + key + "] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized";
assertThat(cause, hasToString(containsString(causeExpected)));
}
@ -392,4 +535,13 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
IndexMetadata metadata = IndexMetadata.builder(name).settings(build).build();
return metadata;
}
/**
 * Creates a search context on {@code index} pre-populated with a match_all
 * source and a shard task carrying an {@code X-Opaque-Id} header, mirroring
 * what a real search request would hand to the slow log.
 */
private SearchContext searchContextWithSourceAndTask(IndexService index) {
    SearchContext searchContext = createSearchContext(index);
    searchContext.request().source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()));
    searchContext.setTask(
        new SearchShardTask(0, "n/a", "n/a", "test", null, Collections.singletonMap(Task.X_OPAQUE_ID, "my_id")));
    return searchContext;
}
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import org.elasticsearch.test.ESTestCase;
public class SlowLogLevelTests extends ESTestCase {

    // A configured level lets through messages at its own severity and above.
    private static void assertEnabledFor(SlowLogLevel configured, SlowLogLevel... messageLevels) {
        for (SlowLogLevel messageLevel : messageLevels) {
            assertTrue(configured.isLevelEnabledFor(messageLevel));
        }
    }

    // A configured level filters out messages below its own severity.
    private static void assertDisabledFor(SlowLogLevel configured, SlowLogLevel... messageLevels) {
        for (SlowLogLevel messageLevel : messageLevels) {
            assertFalse(configured.isLevelEnabledFor(messageLevel));
        }
    }

    public void testTracePrecedence() {
        // TRACE is the most verbose setting: everything is logged.
        assertEnabledFor(SlowLogLevel.TRACE, SlowLogLevel.TRACE, SlowLogLevel.DEBUG, SlowLogLevel.INFO, SlowLogLevel.WARN);
    }

    public void testDebugPrecedence() {
        assertDisabledFor(SlowLogLevel.DEBUG, SlowLogLevel.TRACE);
        assertEnabledFor(SlowLogLevel.DEBUG, SlowLogLevel.DEBUG, SlowLogLevel.INFO, SlowLogLevel.WARN);
    }

    public void testInfoPrecedence() {
        assertDisabledFor(SlowLogLevel.INFO, SlowLogLevel.TRACE, SlowLogLevel.DEBUG);
        assertEnabledFor(SlowLogLevel.INFO, SlowLogLevel.INFO, SlowLogLevel.WARN);
    }

    public void testWarnPrecedence() {
        // WARN is the most restrictive setting: only WARN itself is logged.
        assertDisabledFor(SlowLogLevel.WARN, SlowLogLevel.TRACE, SlowLogLevel.DEBUG, SlowLogLevel.INFO);
        assertEnabledFor(SlowLogLevel.WARN, SlowLogLevel.WARN);
    }
}