Add `slf4j` request logger (#3146)

* Add `slf4j` request logger

* Address comments

* Fix conflicts with master

* Fix removed map value
This commit is contained in:
Charles Allen 2016-07-29 15:15:41 -07:00 committed by Fangjin Yang
parent e5397ed316
commit d04af6aee4
6 changed files with 493 additions and 2 deletions

View File

@ -91,7 +91,7 @@ All nodes that can serve queries can also log the query requests they see.
|Property|Description|Default|
|--------|-----------|-------|
|`druid.request.logging.type`|Choices: noop, file, emitter. How to log every query request.|noop|
|`druid.request.logging.type`|Choices: noop, file, emitter, slf4j. How to log every query request.|noop|
Note that, you can enable sending all the HTTP requests to log by setting "io.druid.jetty.RequestLog" to DEBUG level. See [Logging](../configuration/logging.html)
@ -111,6 +111,28 @@ Every request is emitted to some external location.
|--------|-----------|-------|
|`druid.request.logging.feed`|Feed name for requests.|none|
#### SLF4J Request Logging
Every request is logged via SLF4J. Queries are serialized into JSON in the log message regardless of the SLF4J format specification. They will be logged under the class `io.druid.server.log.LoggingRequestLogger`.
|Property|Description|Default|
|--------|-----------|-------|
|`druid.request.logging.setMDC`|If MDC entries should be set in the log entry. Your logging setup still has to be configured to handle MDC to format this data|false|
|`druid.request.logging.setContextMDC`|If the druid query `context` should be added to the MDC entries. Has no effect unless `setMDC` is `true`|false|
MDC fields populated with `setMDC`:
|MDC field|Description|
|---------|-----------|
|`queryId` |The query ID|
|`dataSource`|The datasource the query was against|
|`queryType` |The type of the query|
|`hasFilters`|If the query has any filters|
|`remoteAddr`|The remote address of the requesting client|
|`duration` |The duration of the query interval|
|`resultOrdering`|The ordering of results|
|`descending`|If the query is a descending query|
### Enabling Metrics
Druid nodes periodically emit metrics and different metrics monitors can be included. Each node can overwrite the default list of monitors.

View File

@ -27,6 +27,7 @@ import io.druid.initialization.DruidModule;
import io.druid.query.QuerySegmentWalker;
import io.druid.server.log.EmittingRequestLoggerProvider;
import io.druid.server.log.FileRequestLoggerProvider;
import io.druid.server.log.LoggingRequestLoggerProvider;
import io.druid.server.log.RequestLogger;
import io.druid.server.log.RequestLoggerProvider;
@ -50,7 +51,11 @@ public class QueryableModule implements DruidModule
{
return Arrays.<Module>asList(
new SimpleModule("QueryableModule")
.registerSubtypes(EmittingRequestLoggerProvider.class, FileRequestLoggerProvider.class)
.registerSubtypes(
EmittingRequestLoggerProvider.class,
FileRequestLoggerProvider.class,
LoggingRequestLoggerProvider.class
)
);
}
}

View File

@ -0,0 +1,105 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.log;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.metamx.common.logger.Logger;
import io.druid.query.Query;
import io.druid.server.RequestLogLine;
import org.slf4j.MDC;
import java.io.IOException;
import java.util.Map;
/**
 * A {@link RequestLogger} that writes each request log line to SLF4J (via the metamx Logger
 * wrapper) at INFO level. Optionally populates the SLF4J MDC with query metadata
 * (and, optionally, the query's context map) so logging layouts can surface them.
 */
public class LoggingRequestLogger implements RequestLogger
{
  private static final Logger LOG = new Logger(LoggingRequestLogger.class);

  private final ObjectMapper mapper;
  private final boolean setMDC;
  private final boolean setContextMDC;

  /**
   * @param mapper        serializes the request log line into the JSON log message
   * @param setMDC        if true, set query metadata (queryId, dataSource, ...) in the MDC while logging
   * @param setContextMDC if true (and {@code setMDC} is true), also copy the query's context map into the MDC
   */
  public LoggingRequestLogger(
      ObjectMapper mapper,
      boolean setMDC,
      boolean setContextMDC
  )
  {
    this.mapper = mapper;
    this.setMDC = setMDC;
    this.setContextMDC = setContextMDC;
  }

  @Override
  public void log(RequestLogLine requestLogLine) throws IOException
  {
    // Snapshot the caller's MDC so it can be restored afterwards; may be null if no MDC is set.
    final Map<String, String> mdc = MDC.getCopyOfContextMap();
    // MDC must be set during the `LOG.info` call at the end of the try block.
    try {
      if (setMDC) {
        try {
          final Query<?> query = requestLogLine.getQuery();
          MDC.put("queryId", query.getId());
          MDC.put("dataSource", query.getDataSource().toString());
          MDC.put("queryType", query.getType());
          MDC.put("hasFilters", Boolean.toString(query.hasFilters()));
          MDC.put("remoteAddr", requestLogLine.getRemoteAddr());
          MDC.put("duration", query.getDuration().toString());
          MDC.put("descending", Boolean.toString(query.isDescending()));
          if (setContextMDC) {
            // A null context is treated as empty; null values are logged as the string "NULL".
            final Iterable<Map.Entry<String, Object>> entries = query.getContext() == null
                                                                ? ImmutableList.<Map.Entry<String, Object>>of()
                                                                : query.getContext().entrySet();
            for (Map.Entry<String, Object> entry : entries) {
              MDC.put(entry.getKey(), entry.getValue() == null ? "NULL" : entry.getValue().toString());
            }
          }
        }
        catch (RuntimeException re) {
          // MDC population is best-effort; never let it prevent the request from being logged.
          LOG.error(re, "Error preparing MDC");
        }
      }
      final String line = requestLogLine.getLine(mapper);

      // MDC must be set here
      LOG.info("%s", line);
    }
    finally {
      // Restore (or clear) the MDC so entries never leak into unrelated log statements on this thread.
      if (setMDC) {
        if (mdc != null) {
          MDC.setContextMap(mdc);
        } else {
          MDC.clear();
        }
      }
    }
  }

  public boolean isSetMDC()
  {
    return setMDC;
  }

  public boolean isSetContextMDC()
  {
    return setContextMDC;
  }
}

View File

@ -0,0 +1,46 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.log;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.guice.annotations.Json;
@JsonTypeName("slf4j")
/**
 * Jackson-configurable provider for {@link LoggingRequestLogger}, selected with
 * {@code druid.request.logging.type=slf4j}.
 */
@JsonTypeName("slf4j")
public class LoggingRequestLoggerProvider implements RequestLoggerProvider
{
  // Injected JSON mapper used by the logger to serialize request log lines.
  @JacksonInject
  @Json
  public ObjectMapper mapper;

  // Whether query metadata is placed in the SLF4J MDC while logging. Off by default.
  @JsonProperty
  public boolean setMDC = false;

  // Whether the query context map is also copied into the MDC. Off by default.
  @JsonProperty
  public boolean setContextMDC = false;

  @Override
  public RequestLogger get()
  {
    final LoggingRequestLogger logger = new LoggingRequestLogger(mapper, setMDC, setContextMDC);
    return logger;
  }
}

View File

@ -0,0 +1,89 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.log;
import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.name.Names;
import io.druid.guice.GuiceInjectors;
import io.druid.guice.JsonConfigProvider;
import io.druid.guice.JsonConfigurator;
import io.druid.initialization.Initialization;
import org.junit.Assert;
import org.junit.Test;
import java.util.Properties;
import java.util.UUID;
/**
 * Verifies that {@link LoggingRequestLoggerProvider} is discoverable under the "slf4j" type name
 * and that its properties bind correctly through Druid's JsonConfigProvider machinery.
 */
public class LoggingRequestLoggerProviderTest
{
  // Random prefix so property bindings from other tests in the same JVM cannot collide.
  private final String propertyPrefix = UUID.randomUUID().toString().replace('-', '_');
  private final JsonConfigProvider<RequestLoggerProvider> provider = JsonConfigProvider.of(
      propertyPrefix,
      RequestLoggerProvider.class
  );
  private final Injector injector = makeInjector();
  @Test
  public void testDefaultConfigParsing()
  {
    // Only the type is specified; both MDC flags should fall back to their defaults (false).
    final Properties properties = new Properties();
    properties.put(propertyPrefix + ".type", "slf4j");
    provider.inject(properties, injector.getInstance(JsonConfigurator.class));
    final LoggingRequestLogger requestLogger = (LoggingRequestLogger) provider.get().get().get();
    Assert.assertFalse(requestLogger.isSetContextMDC());
    Assert.assertFalse(requestLogger.isSetMDC());
  }
  @Test
  public void testConfigParsingFull()
  {
    // Both MDC flags set explicitly; they should surface on the built logger.
    final Properties properties = new Properties();
    properties.put(propertyPrefix + ".type", "slf4j");
    properties.put(propertyPrefix + ".setMDC", "true");
    properties.put(propertyPrefix + ".setContextMDC", "true");
    provider.inject(properties, injector.getInstance(JsonConfigurator.class));
    final LoggingRequestLogger requestLogger = (LoggingRequestLogger) provider.get().get().get();
    Assert.assertTrue(requestLogger.isSetContextMDC());
    Assert.assertTrue(requestLogger.isSetMDC());
  }
  // Builds a minimal injector with the service name/port bindings Initialization requires,
  // plus the RequestLoggerProvider binding under this test's random property prefix.
  private Injector makeInjector()
  {
    return Initialization.makeInjectorWithModules(
        GuiceInjectors.makeStartupInjector(),
        ImmutableList.<Module>of(
            new Module()
            {
              @Override
              public void configure(Binder binder)
              {
                binder.bind(Key.get(String.class, Names.named("serviceName"))).toInstance("some service");
                binder.bind(Key.get(Integer.class, Names.named("servicePort"))).toInstance(0);
                JsonConfigProvider.bind(binder, propertyPrefix, RequestLoggerProvider.class);
              }
            }
        )
    );
  }
}

View File

@ -0,0 +1,224 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.log;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.BaseQuery;
import io.druid.query.DataSource;
import io.druid.query.LegacyDataSource;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QuerySegmentWalker;
import io.druid.query.filter.DimFilter;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.server.QueryStats;
import io.druid.server.RequestLogLine;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.appender.OutputStreamAppender;
import org.apache.logging.log4j.core.layout.JsonLayout;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
// Mostly just test that it doesn't crash
/**
 * Exercises {@link LoggingRequestLogger} end-to-end by attaching a log4j2 OutputStreamAppender
 * with a JSON layout and asserting on the MDC entries captured in the emitted log event.
 */
public class LoggingRequestLoggerTest
{
  private static final ObjectMapper mapper = new DefaultObjectMapper();
  // Captures the JSON-serialized log events emitted during each test; reset between tests.
  private static final ByteArrayOutputStream baos = new ByteArrayOutputStream();
  private static Appender appender;

  final DateTime timestamp = DateTime.parse("2016-01-01T00:00:00Z");
  final String remoteAddr = "some.host.tld";
  final Map<String, Object> queryContext = ImmutableMap.<String, Object>of("foo", "bar");
  // A minimal fake query over a one-day interval; only the accessors the logger reads matter.
  final Query query = new FakeQuery(
      new LegacyDataSource("datasource"),
      new QuerySegmentSpec()
      {
        @Override
        public List<Interval> getIntervals()
        {
          return Collections.singletonList(Interval.parse("2016-01-01T00Z/2016-01-02T00Z"));
        }
        @Override
        public <T> QueryRunner<T> lookup(Query<T> query, QuerySegmentWalker walker)
        {
          return null;
        }
      }, false, queryContext
  );
  final QueryStats queryStats = new QueryStats(ImmutableMap.<String, Object>of());
  final RequestLogLine logLine = new RequestLogLine(
      timestamp,
      remoteAddr,
      query,
      queryStats
  );
  @BeforeClass
  public static void setUpStatic() throws Exception
  {
    // Attach a JSON-layout appender directly to the logger's underlying log4j2 Logger so the
    // emitted events (including the MDC / context map) can be parsed back out of `baos`.
    appender = OutputStreamAppender
        .newBuilder()
        .setName("test stream")
        .setTarget(baos)
        .setLayout(JsonLayout.createLayout(false, true, false, true, true, Charsets.UTF_8))
        .build();
    final org.apache.logging.log4j.core.Logger logger = (org.apache.logging.log4j.core.Logger)
        LogManager.getLogger(LoggingRequestLogger.class);
    appender.start();
    logger.addAppender(appender);
  }
  @After
  public void tearDown()
  {
    // Discard the captured output so each test only sees its own log event.
    baos.reset();
  }
  @AfterClass
  public static void tearDownStatic()
  {
    // Detach and stop the appender to avoid leaking it into other tests' logging.
    final org.apache.logging.log4j.core.Logger logger = (org.apache.logging.log4j.core.Logger) LogManager.getLogger(
        LoggingRequestLogger.class);
    logger.removeAppender(appender);
    appender.stop();
  }
  @Test
  public void testSimpleLogging() throws Exception
  {
    // MDC disabled: just verify the log call completes without error.
    final LoggingRequestLogger requestLogger = new LoggingRequestLogger(new DefaultObjectMapper(), false, false);
    requestLogger.log(logLine);
  }
  @Test
  public void testLoggingMDC() throws Exception
  {
    // setMDC only: query metadata appears in the MDC, but context entries ("foo") do not.
    final LoggingRequestLogger requestLogger = new LoggingRequestLogger(new DefaultObjectMapper(), true, false);
    requestLogger.log(logLine);
    final Map<String, Object> map = readContextMap(baos.toByteArray());
    Assert.assertEquals("datasource", map.get("dataSource"));
    Assert.assertEquals("PT86400S", map.get("duration"));
    Assert.assertEquals("false", map.get("hasFilters"));
    Assert.assertEquals("fake", map.get("queryType"));
    Assert.assertEquals("some.host.tld", map.get("remoteAddr"));
    Assert.assertEquals("false", map.get("descending"));
    Assert.assertNull(map.get("foo"));
  }
  @Test
  public void testLoggingMDCContext() throws Exception
  {
    // setMDC + setContextMDC: the query context entry ("foo" -> "bar") is also present.
    final LoggingRequestLogger requestLogger = new LoggingRequestLogger(new DefaultObjectMapper(), true, true);
    requestLogger.log(logLine);
    final Map<String, Object> map = readContextMap(baos.toByteArray());
    Assert.assertEquals("datasource", map.get("dataSource"));
    Assert.assertEquals("PT86400S", map.get("duration"));
    Assert.assertEquals("false", map.get("hasFilters"));
    Assert.assertEquals("fake", map.get("queryType"));
    Assert.assertEquals("some.host.tld", map.get("remoteAddr"));
    Assert.assertEquals("false", map.get("descending"));
    Assert.assertEquals("bar", map.get("foo"));
  }
  // Parses the JsonLayout output and flattens its "contextMap" (a list of key/value objects)
  // into a plain map. Returns null when the event carried no context map at all.
  private static Map<String, Object> readContextMap(byte[] bytes) throws Exception
  {
    final Map<String, Object> rawMap = mapper.readValue(bytes, new TypeReference<Map<String, Object>>()
    {
    });
    final Object contextMap = rawMap.get("contextMap");
    if (contextMap == null) {
      return null;
    }
    final Collection<Map<String, Object>> contextList = (Collection<Map<String, Object>>) contextMap;
    final Map<String, Object> context = new HashMap<>();
    for (Map<String, Object> microContext : contextList) {
      final String key = microContext.get("key").toString();
      final Object value = microContext.get("value");
      if (key != null && value != null) {
        context.put(key, value);
      }
    }
    return ImmutableMap.copyOf(context);
  }
}
// Minimal Query implementation used only to drive LoggingRequestLogger in the tests above.
// The accessors the logger actually reads are functional; every other operation throws.
@JsonTypeName("fake")
class FakeQuery extends BaseQuery
{
  public FakeQuery(DataSource dataSource, QuerySegmentSpec querySegmentSpec, boolean descending, Map context)
  {
    super(dataSource, querySegmentSpec, descending, context);
  }

  // Reported as the MDC "queryType" value.
  @Override
  public String getType()
  {
    return "fake";
  }

  // Reported as the MDC "hasFilters" value.
  @Override
  public boolean hasFilters()
  {
    return false;
  }

  // --- Operations the logger never invokes; reaching them indicates a test bug. ---

  @Override
  public DimFilter getFilter()
  {
    throw new UnsupportedOperationException("shouldn't be here");
  }

  @Override
  public Query withQuerySegmentSpec(QuerySegmentSpec spec)
  {
    throw new UnsupportedOperationException("shouldn't be here");
  }

  @Override
  public Query withDataSource(DataSource dataSource)
  {
    throw new UnsupportedOperationException("shouldn't be here");
  }

  @Override
  public Query withOverriddenContext(Map contextOverride)
  {
    throw new UnsupportedOperationException("shouldn't be here");
  }
}