Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-17 10:25:15 +00:00)
Structured audit logging (#31931)
Changes the format of log events in the audit logfile and changes the filename suffix from `_access` to `_audit`. The new entry format is consistent with the Elastic Common Schema. Entries are formatted as JSON with no nested objects, and field names use a dotted syntax. Log entries are not separated by commas, and there is exactly one entry per line. In addition, entry fields are ordered, unlike in a typical JSON document, so that a reader does not have to scan for fields jumbled differently from one line to the next; the order is defined in the log4j2 properties file. The implementation uses log4j2's `StringMapMessage`: the application builds the log event as a map of field names to values, and the log4j logic (the appender's layout) handles the formatting internally. The layout, that is the set of printed fields and their order, can be changed at runtime without restarting the node.
This commit is contained in:
parent faa3c16241
commit c86e2d5211
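Note (illustration only, not part of the diff): the entry-as-map approach described above can be sketched with log4j2's StringMapMessage roughly as follows. The logger name and the field values are made up for illustration; the real event building lives in LoggingAuditTrail.

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.message.StringMapMessage;

    public class AuditEntrySketch {
        // Hypothetical logger name; the real audit trail wires its own logger to the
        // audit_rolling appender configured below.
        private static final Logger AUDIT = LogManager.getLogger("audit_sketch");

        void onAuthenticationFailed() {
            // The application only builds a flat map of dotted field names; which fields
            // are printed, and in what order, is decided by the appender's layout.
            StringMapMessage entry = new StringMapMessage()
                    .with("event.type", "rest")
                    .with("event.action", "authentication_failed")
                    .with("user.name", "jdoe")
                    .with("origin.type", "rest")
                    .with("origin.address", "127.0.0.1:45678");
            AUDIT.info(entry);
        }
    }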
@@ -1,9 +1,64 @@
appender.audit_rolling.type = RollingFile
appender.audit_rolling.name = audit_rolling
appender.audit_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_access.log
appender.audit_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_audit.log
appender.audit_rolling.layout.type = PatternLayout
appender.audit_rolling.layout.pattern = [%d{ISO8601}] %m%n
appender.audit_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_access-%d{yyyy-MM-dd}.log
appender.audit_rolling.layout.pattern = {\
"@timestamp":"%d{ISO8601}"\
%varsNotEmpty{, "node.name":"%enc{%map{node.name}}{JSON}"}\
%varsNotEmpty{, "node.id":"%enc{%map{node.id}}{JSON}"}\
%varsNotEmpty{, "host.name":"%enc{%map{host.name}}{JSON}"}\
%varsNotEmpty{, "host.ip":"%enc{%map{host.ip}}{JSON}"}\
%varsNotEmpty{, "event.type":"%enc{%map{event.type}}{JSON}"}\
%varsNotEmpty{, "event.action":"%enc{%map{event.action}}{JSON}"}\
%varsNotEmpty{, "user.name":"%enc{%map{user.name}}{JSON}"}\
%varsNotEmpty{, "user.run_by.name":"%enc{%map{user.run_by.name}}{JSON}"}\
%varsNotEmpty{, "user.run_as.name":"%enc{%map{user.run_as.name}}{JSON}"}\
%varsNotEmpty{, "user.realm":"%enc{%map{user.realm}}{JSON}"}\
%varsNotEmpty{, "user.run_by.realm":"%enc{%map{user.run_by.realm}}{JSON}"}\
%varsNotEmpty{, "user.run_as.realm":"%enc{%map{user.run_as.realm}}{JSON}"}\
%varsNotEmpty{, "user.roles":%map{user.roles}}\
%varsNotEmpty{, "origin.type":"%enc{%map{origin.type}}{JSON}"}\
%varsNotEmpty{, "origin.address":"%enc{%map{origin.address}}{JSON}"}\
%varsNotEmpty{, "realm":"%enc{%map{realm}}{JSON}"}\
%varsNotEmpty{, "url.path":"%enc{%map{url.path}}{JSON}"}\
%varsNotEmpty{, "url.query":"%enc{%map{url.query}}{JSON}"}\
%varsNotEmpty{, "request.body":"%enc{%map{request.body}}{JSON}"}\
%varsNotEmpty{, "action":"%enc{%map{action}}{JSON}"}\
%varsNotEmpty{, "request.name":"%enc{%map{request.name}}{JSON}"}\
%varsNotEmpty{, "indices":%map{indices}}\
%varsNotEmpty{, "opaque_id":"%enc{%map{opaque_id}}{JSON}"}\
%varsNotEmpty{, "transport.profile":"%enc{%map{transport.profile}}{JSON}"}\
%varsNotEmpty{, "rule":"%enc{%map{rule}}{JSON}"}\
%varsNotEmpty{, "event.category":"%enc{%map{event.category}}{JSON}"}\
}%n
# "node.name" node name from the `elasticsearch.yml` settings
# "node.id" node id which should not change between cluster restarts
# "host.name" unresolved hostname of the local node
# "host.ip" the local bound ip (i.e. the ip listening for connections)
# "event.type" a received REST request is translated into one or more transport requests. This indicates which processing layer generated the event "rest" or "transport" (internal)
# "event.action" the name of the audited event, eg. "authentication_failed", "access_granted", "run_as_granted", etc.
# "user.name" the subject name as authenticated by a realm
# "user.run_by.name" the original authenticated subject name that is impersonating another one.
# "user.run_as.name" if this "event.action" is of a run_as type, this is the subject name to be impersonated as.
# "user.realm" the name of the realm that authenticated "user.name"
# "user.run_by.realm" the realm name of the impersonating subject ("user.run_by.name")
# "user.run_as.realm" if this "event.action" is of a run_as type, this is the realm name the impersonated user is looked up from
# "user.roles" the roles array of the user; these are the roles that are granting privileges
# "origin.type" it is "rest" if the event is originating (is in relation to) a REST request; possible other values are "transport" and "ip_filter"
# "origin.address" the remote address and port of the first network hop, i.e. a REST proxy or another cluster node
# "realm" name of a realm that has generated an "authentication_failed" or an "authentication_successful"; the subject is not yet authenticated
# "url.path" the URI component between the port and the query string; it is percent (URL) encoded
# "url.query" the URI component after the path and before the fragment; it is percent (URL) encoded
# "request.body" the content of the request body entity, JSON escaped
# "action" an action is the most granular operation that is authorized and this identifies it in a namespaced way (internal)
# "request.name" if the event is in connection to a transport message this is the name of the request class, similar to how rest requests are identified by the url path (internal)
# "indices" the array of indices that the "action" is acting upon
# "opaque_id" opaque value conveyed by the "X-Opaque-Id" request header
# "transport.profile" name of the transport profile in case this is a "connection_granted" or "connection_denied" event
# "rule" name of the applied rule if the "origin.type" is "ip_filter"
# "event.category" fixed value "elasticsearch-audit"

appender.audit_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_audit-%d{yyyy-MM-dd}.log
appender.audit_rolling.policies.type = Policies
appender.audit_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.audit_rolling.policies.time.interval = 1
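Note (illustration only, not part of the diff): with the layout above, every audit entry is a single line of flat JSON keyed by the dotted names documented in the comments, so it can be read back with the same XContentHelper call the SQL security tests below switch to. The sample line and its values here are hypothetical.

    import java.util.Map;

    import org.elasticsearch.common.xcontent.XContentHelper;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    public class AuditLineParseSketch {
        public static void main(String[] args) {
            // Hypothetical entry shaped like the layout above would print it: one line,
            // no nested objects, dotted keys, empty fields simply absent.
            String line = "{\"@timestamp\":\"2018-07-31T11:00:00,000\", \"node.name\":\"node-0\","
                    + " \"event.type\":\"rest\", \"event.action\":\"authentication_failed\","
                    + " \"user.name\":\"jdoe\", \"origin.type\":\"rest\", \"origin.address\":\"127.0.0.1:45678\"}";
            Map<String, Object> entry = XContentHelper.convertToMap(JsonXContent.jsonXContent, line, false);
            // Fields come back keyed by the dotted names documented in the comments above.
            System.out.println(entry.get("event.action")); // prints authentication_failed
        }
    }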
@@ -8,26 +8,51 @@ package org.elasticsearch.xpack.core.security.audit.logfile;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.StringLayout;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.logging.Loggers;

import java.util.ArrayList;
import java.util.List;

/**
 * Logger that captures events and appends them to in memory lists, with one
 * list for each log level. This works with the global log manager context,
 * meaning that there could only be a single logger with the same name.
 */
public class CapturingLogger {

    public static Logger newCapturingLogger(final Level level) throws IllegalAccessException {
    /**
     * Constructs a new {@link CapturingLogger} named as the fully qualified name of
     * the invoking method. One name can be assigned to a single logger globally, so
     * don't call this method multiple times in the same method.
     *
     * @param level
     *            The minimum priority level of events that will be captured.
     * @param layout
     *            Optional parameter allowing to set the layout format of events.
     *            This is useful because events are captured to be inspected (and
     *            parsed) later. When parsing, it is useful to be in control of the
     *            printing format as well. If not specified,
     *            {@code event.getMessage().getFormattedMessage()} is called to
     *            format the event.
     * @return The new logger.
     */
    public static Logger newCapturingLogger(final Level level, @Nullable StringLayout layout) throws IllegalAccessException {
        // careful, don't "bury" this on the call stack, unless you know what you're doing
        final StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
        final String name = caller.getClassName() + "." + caller.getMethodName() + "." + level.toString();
        final Logger logger = ESLoggerFactory.getLogger(name);
        Loggers.setLevel(logger, level);
        final MockAppender appender = new MockAppender(name);
        final MockAppender appender = new MockAppender(name, layout);
        appender.start();
        Loggers.addAppender(logger, appender);
        return logger;
@@ -40,11 +65,27 @@ public class CapturingLogger {
        return (MockAppender) loggerConfig.getAppenders().get(name);
    }

    /**
     * Checks if the logger's appender has captured any events.
     *
     * @param name
     *            The unique global name of the logger.
     * @return {@code true} if no event has been captured, {@code false} otherwise.
     */
    public static boolean isEmpty(final String name) {
        final MockAppender appender = getMockAppender(name);
        return appender.isEmpty();
    }

    /**
     * Gets the captured events for a logger by its name.
     *
     * @param name
     *            The unique global name of the logger.
     * @param level
     *            The priority level of the captured events to be returned.
     * @return A list of captured events formatted to {@code String}.
     */
    public static List<String> output(final String name, final Level level) {
        final MockAppender appender = getMockAppender(name);
        return appender.output(level);
@@ -58,8 +99,8 @@ public class CapturingLogger {
        public final List<String> debug = new ArrayList<>();
        public final List<String> trace = new ArrayList<>();

        private MockAppender(final String name) throws IllegalAccessException {
            super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), null);
        private MockAppender(final String name, StringLayout layout) throws IllegalAccessException {
            super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], false, null, null), layout);
        }

        @Override
@@ -68,25 +109,34 @@ public class CapturingLogger {
            // we can not keep a reference to the event here because Log4j is using a thread
            // local instance under the hood
                case "ERROR":
                    error.add(event.getMessage().getFormattedMessage());
                    error.add(formatMessage(event));
                    break;
                case "WARN":
                    warn.add(event.getMessage().getFormattedMessage());
                    warn.add(formatMessage(event));
                    break;
                case "INFO":
                    info.add(event.getMessage().getFormattedMessage());
                    info.add(formatMessage(event));
                    break;
                case "DEBUG":
                    debug.add(event.getMessage().getFormattedMessage());
                    debug.add(formatMessage(event));
                    break;
                case "TRACE":
                    trace.add(event.getMessage().getFormattedMessage());
                    trace.add(formatMessage(event));
                    break;
                default:
                    throw invalidLevelException(event.getLevel());
            }
        }

        private String formatMessage(LogEvent event) {
            final Layout<?> layout = getLayout();
            if (layout instanceof StringLayout) {
                return ((StringLayout) layout).toSerializable(event);
            } else {
                return event.getMessage().getFormattedMessage();
            }
        }

        private IllegalArgumentException invalidLevelException(Level level) {
            return new IllegalArgumentException("invalid level, expected [ERROR|WARN|INFO|DEBUG|TRACE] but was [" + level + "]");
        }
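Note (illustration only, not part of the diff): a test would use the extended CapturingLogger API roughly as follows, assuming log4j2's PatternLayout (which implements StringLayout) so that captured events are rendered through the layout instead of getFormattedMessage(); the "%m" pattern is just an example.

    import java.util.List;

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.core.layout.PatternLayout;
    import org.elasticsearch.xpack.core.security.audit.logfile.CapturingLogger;

    public class CapturingLoggerUsageSketch {
        public void capturesThroughLayout() throws IllegalAccessException {
            // Passing a StringLayout makes MockAppender serialize each captured event
            // through it rather than via event.getMessage().getFormattedMessage().
            final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO,
                    PatternLayout.newBuilder().withPattern("%m").build());
            logger.info("hello");
            // Captured events are looked up per level by the logger's unique global name.
            final List<String> output = CapturingLogger.output(logger.getName(), Level.INFO);
            // output.get(0) is "hello", as rendered by the "%m" pattern.
        }
    }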
@@ -140,6 +140,7 @@ artifacts {
}
sourceSets.test.resources {
    srcDir '../core/src/test/resources'
    srcDir '../core/src/main/config'
}
dependencyLicenses {
    mapping from: /java-support|opensaml-.*/, to: 'shibboleth'
(File diff suppressed because it is too large)
@@ -137,9 +137,10 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
            .put(XPackSettings.WATCHER_ENABLED.getKey(), false)
            .put(XPackSettings.MONITORING_ENABLED.getKey(), false)
            .put(XPackSettings.AUDIT_ENABLED.getKey(), randomBoolean())
            .put(LoggingAuditTrail.HOST_ADDRESS_SETTING.getKey(), randomBoolean())
            .put(LoggingAuditTrail.HOST_NAME_SETTING.getKey(), randomBoolean())
            .put(LoggingAuditTrail.NODE_NAME_SETTING.getKey(), randomBoolean())
            .put(LoggingAuditTrail.EMIT_HOST_ADDRESS_SETTING.getKey(), randomBoolean())
            .put(LoggingAuditTrail.EMIT_HOST_NAME_SETTING.getKey(), randomBoolean())
            .put(LoggingAuditTrail.EMIT_NODE_NAME_SETTING.getKey(), randomBoolean())
            .put(LoggingAuditTrail.EMIT_NODE_ID_SETTING.getKey(), randomBoolean())
            .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE)
            .put("xpack.security.authc.realms.file.order", 0)
            .put("xpack.security.authc.realms.index.type", NativeRealmSettings.TYPE)
@@ -21,11 +21,11 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.regex.Pattern;

import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -106,28 +106,45 @@ public class AuditTrailSettingsUpdateTests extends SecurityIntegTestCase {
    public void testDynamicHostSettings() {
        final boolean persistent = randomBoolean();
        final Settings.Builder settingsBuilder = Settings.builder();
        settingsBuilder.put(LoggingAuditTrail.HOST_ADDRESS_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.HOST_NAME_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.NODE_NAME_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.EMIT_HOST_ADDRESS_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.EMIT_HOST_NAME_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.EMIT_NODE_NAME_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.EMIT_NODE_ID_SETTING.getKey(), true);
        updateSettings(settingsBuilder.build(), persistent);
        final LoggingAuditTrail loggingAuditTrail = ((LoggingAuditTrail) internalCluster().getInstances(AuditTrailService.class)
        final LoggingAuditTrail loggingAuditTrail = (LoggingAuditTrail) internalCluster().getInstances(AuditTrailService.class)
                .iterator()
                .next()
                .getAuditTrails()
                .iterator()
                .next());
        assertTrue(Pattern.matches("\\[127\\.0\\.0\\.1\\] \\[127\\.0\\.0\\.1\\] \\[node_.*\\] ", loggingAuditTrail.localNodeInfo.prefix));
        settingsBuilder.put(LoggingAuditTrail.HOST_ADDRESS_SETTING.getKey(), false);
                .next();
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.get(LoggingAuditTrail.NODE_NAME_FIELD_NAME), startsWith("node_"));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.NODE_ID_FIELD_NAME), is(true));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.get(LoggingAuditTrail.HOST_ADDRESS_FIELD_NAME), is("127.0.0.1"));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.get(LoggingAuditTrail.HOST_NAME_FIELD_NAME), is("127.0.0.1"));
        settingsBuilder.put(LoggingAuditTrail.EMIT_HOST_ADDRESS_SETTING.getKey(), false);
        updateSettings(settingsBuilder.build(), persistent);
        assertTrue(Pattern.matches("\\[127\\.0\\.0\\.1\\] \\[node_.*\\] ", loggingAuditTrail.localNodeInfo.prefix));
        settingsBuilder.put(LoggingAuditTrail.HOST_ADDRESS_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.HOST_NAME_SETTING.getKey(), false);
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.get(LoggingAuditTrail.NODE_NAME_FIELD_NAME), startsWith("node_"));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.NODE_ID_FIELD_NAME), is(true));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.HOST_ADDRESS_FIELD_NAME), is(false));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.get(LoggingAuditTrail.HOST_NAME_FIELD_NAME), is("127.0.0.1"));
        settingsBuilder.put(LoggingAuditTrail.EMIT_HOST_NAME_SETTING.getKey(), false);
        updateSettings(settingsBuilder.build(), persistent);
        assertTrue(Pattern.matches("\\[127\\.0\\.0\\.1\\] \\[node_.*\\] ", loggingAuditTrail.localNodeInfo.prefix));
        settingsBuilder.put(LoggingAuditTrail.HOST_NAME_SETTING.getKey(), true);
        settingsBuilder.put(LoggingAuditTrail.NODE_NAME_SETTING.getKey(), false);
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.get(LoggingAuditTrail.NODE_NAME_FIELD_NAME), startsWith("node_"));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.NODE_ID_FIELD_NAME), is(true));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.HOST_ADDRESS_FIELD_NAME), is(false));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.HOST_NAME_FIELD_NAME), is(false));
        settingsBuilder.put(LoggingAuditTrail.EMIT_NODE_NAME_SETTING.getKey(), false);
        updateSettings(settingsBuilder.build(), persistent);
        assertTrue(Pattern.matches("\\[127\\.0\\.0\\.1\\] \\[127\\.0\\.0\\.1\\] ", loggingAuditTrail.localNodeInfo.prefix));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.NODE_NAME_FIELD_NAME), is(false));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.NODE_ID_FIELD_NAME), is(true));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.HOST_ADDRESS_FIELD_NAME), is(false));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.HOST_NAME_FIELD_NAME), is(false));
        settingsBuilder.put(LoggingAuditTrail.EMIT_NODE_ID_SETTING.getKey(), false);
        updateSettings(settingsBuilder.build(), persistent);
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.NODE_NAME_FIELD_NAME), is(false));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.NODE_ID_FIELD_NAME), is(false));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.HOST_ADDRESS_FIELD_NAME), is(false));
        assertThat(loggingAuditTrail.entryCommonFields.commonFields.containsKey(LoggingAuditTrail.HOST_NAME_FIELD_NAME), is(false));
    }

    public void testDynamicRequestBodySettings() {
@@ -136,12 +153,12 @@ public class AuditTrailSettingsUpdateTests extends SecurityIntegTestCase {
        final Settings.Builder settingsBuilder = Settings.builder();
        settingsBuilder.put(LoggingAuditTrail.INCLUDE_REQUEST_BODY.getKey(), enableRequestBody);
        updateSettings(settingsBuilder.build(), persistent);
        final LoggingAuditTrail loggingAuditTrail = ((LoggingAuditTrail) internalCluster().getInstances(AuditTrailService.class)
        final LoggingAuditTrail loggingAuditTrail = (LoggingAuditTrail) internalCluster().getInstances(AuditTrailService.class)
                .iterator()
                .next()
                .getAuditTrails()
                .iterator()
                .next());
                .next();
        assertEquals(enableRequestBody, loggingAuditTrail.includeRequestBody);
        settingsBuilder.put(LoggingAuditTrail.INCLUDE_REQUEST_BODY.getKey(), !enableRequestBody);
        updateSettings(settingsBuilder.build(), persistent);
@@ -158,12 +175,12 @@ public class AuditTrailSettingsUpdateTests extends SecurityIntegTestCase {
        settingsBuilder.putList(LoggingAuditTrail.INCLUDE_EVENT_SETTINGS.getKey(), includedEvents);
        settingsBuilder.putList(LoggingAuditTrail.EXCLUDE_EVENT_SETTINGS.getKey(), excludedEvents);
        updateSettings(settingsBuilder.build(), randomBoolean());
        final LoggingAuditTrail loggingAuditTrail = ((LoggingAuditTrail) internalCluster().getInstances(AuditTrailService.class)
        final LoggingAuditTrail loggingAuditTrail = (LoggingAuditTrail) internalCluster().getInstances(AuditTrailService.class)
                .iterator()
                .next()
                .getAuditTrails()
                .iterator()
                .next());
                .next();
        assertEquals(AuditLevel.parse(includedEvents, excludedEvents), loggingAuditTrail.events);
    }
@@ -59,9 +59,6 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
    private Settings settings;
    private DiscoveryNode localNode;
    private ClusterService clusterService;
    private ThreadContext threadContext;
    private Logger logger;
    List<String> logOutput;

    @Before
    public void init() throws Exception {
@@ -83,12 +80,11 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
            arg0.updateLocalNodeInfo(localNode);
            return null;
        }).when(clusterService).addListener(Mockito.isA(LoggingAuditTrail.class));
        threadContext = new ThreadContext(Settings.EMPTY);
        logger = CapturingLogger.newCapturingLogger(Level.INFO);
        logOutput = CapturingLogger.output(logger.getName(), Level.INFO);
    }

    public void testSingleCompletePolicyPredicate() throws Exception {
        final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        // create complete filter policy
        final Settings.Builder settingsBuilder = Settings.builder().put(settings);
        // filter by username
@@ -179,6 +175,8 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
    }

    public void testSingleCompleteWithEmptyFieldPolicyPredicate() throws Exception {
        final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        // create complete filter policy
        final Settings.Builder settingsBuilder = Settings.builder().put(settings);
        // filter by username
@@ -275,6 +273,8 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
    }

    public void testTwoPolicyPredicatesWithMissingFields() throws Exception {
        final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        final Settings.Builder settingsBuilder = Settings.builder().put(settings);
        // first policy: realms and roles filters
        final List<String> filteredRealms = randomNonEmptyListOfFilteredNames();
@@ -341,6 +341,8 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
    }

    public void testUsersFilter() throws Exception {
        final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        final List<String> allFilteredUsers = new ArrayList<>();
        final Settings.Builder settingsBuilder = Settings.builder().put(settings);
        for (int i = 0; i < randomIntBetween(1, 4); i++) {
@@ -387,6 +389,7 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
        final MockToken unfilteredToken = new MockToken(UNFILTER_MARKER + randomAlphaOfLengthBetween(1, 4));

        final LoggingAuditTrail auditTrail = new LoggingAuditTrail(settingsBuilder.build(), clusterService, logger, threadContext);
        final List<String> logOutput = CapturingLogger.output(logger.getName(), Level.INFO);
        // anonymous accessDenied
        auditTrail.anonymousAccessDenied("_action", message);
        if (filterMissingUser) {
@@ -623,6 +626,8 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
    }

    public void testRealmsFilter() throws Exception {
        final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        final List<String> allFilteredRealms = new ArrayList<>();
        final Settings.Builder settingsBuilder = Settings.builder().put(settings);
        for (int i = 0; i < randomIntBetween(1, 4); i++) {
@@ -659,6 +664,7 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
        final MockToken authToken = new MockToken("token1");

        final LoggingAuditTrail auditTrail = new LoggingAuditTrail(settingsBuilder.build(), clusterService, logger, threadContext);
        final List<String> logOutput = CapturingLogger.output(logger.getName(), Level.INFO);
        // anonymous accessDenied
        auditTrail.anonymousAccessDenied("_action", message);
        if (filterMissingRealm) {
@@ -908,6 +914,8 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
    }

    public void testRolesFilter() throws Exception {
        final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        final List<List<String>> allFilteredRoles = new ArrayList<>();
        final Settings.Builder settingsBuilder = Settings.builder().put(settings);
        for (int i = 0; i < randomIntBetween(1, 4); i++) {
@@ -966,6 +974,7 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
        final MockToken authToken = new MockToken("token1");

        final LoggingAuditTrail auditTrail = new LoggingAuditTrail(settingsBuilder.build(), clusterService, logger, threadContext);
        final List<String> logOutput = CapturingLogger.output(logger.getName(), Level.INFO);
        // anonymous accessDenied
        auditTrail.anonymousAccessDenied("_action", message);
        if (filterMissingRoles) {
@@ -1179,6 +1188,8 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
    }

    public void testIndicesFilter() throws Exception {
        final Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        final List<List<String>> allFilteredIndices = new ArrayList<>();
        final Settings.Builder settingsBuilder = Settings.builder().put(settings);
        for (int i = 0; i < randomIntBetween(1, 3); i++) {
@@ -1236,6 +1247,7 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
        final TransportMessage noIndexMessage = new MockMessage(threadContext);

        final LoggingAuditTrail auditTrail = new LoggingAuditTrail(settingsBuilder.build(), clusterService, logger, threadContext);
        final List<String> logOutput = CapturingLogger.output(logger.getName(), Level.INFO);
        // anonymous accessDenied
        auditTrail.anonymousAccessDenied("_action", noIndexMessage);
        if (filterMissingIndices) {
(File diff suppressed because it is too large)
@@ -185,7 +185,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {

    public void testParseFile_Empty() throws Exception {
        Path empty = createTempFile();
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
        Map<String, char[]> users = FileUserPasswdStore.parseFile(empty, logger, Settings.EMPTY);
        assertThat(users.isEmpty(), is(true));
        List<String> events = CapturingLogger.output(logger.getName(), Level.DEBUG);
@@ -195,7 +195,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {

    public void testParseFile_WhenFileDoesNotExist() throws Exception {
        Path file = createTempDir().resolve(randomAlphaOfLength(10));
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        Map<String, char[]> users = FileUserPasswdStore.parseFile(file, logger, Settings.EMPTY);
        assertThat(users, nullValue());
        users = FileUserPasswdStore.parseFileLenient(file, logger, Settings.EMPTY);
@@ -207,7 +207,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {
        Path file = createTempFile();
        // writing in utf_16 should cause a parsing error as we try to read the file in utf_8
        Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        try {
            FileUserPasswdStore.parseFile(file, logger, Settings.EMPTY);
            fail("expected a parse failure");
@@ -228,7 +228,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {
        Path file = createTempFile();
        // writing in utf_16 should cause a parsing error as we try to read the file in utf_8
        Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        Map<String, char[]> users = FileUserPasswdStore.parseFileLenient(file, logger, Settings.EMPTY);
        assertThat(users, notNullValue());
        assertThat(users.isEmpty(), is(true));
@@ -175,7 +175,7 @@ public class FileUserRolesStoreTests extends ESTestCase {

    public void testParseFileEmpty() throws Exception {
        Path empty = createTempFile();
        Logger log = CapturingLogger.newCapturingLogger(Level.DEBUG);
        Logger log = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
        FileUserRolesStore.parseFile(empty, log);
        List<String> events = CapturingLogger.output(log.getName(), Level.DEBUG);
        assertThat(events.size(), is(1));
@@ -184,7 +184,7 @@ public class FileUserRolesStoreTests extends ESTestCase {

    public void testParseFileWhenFileDoesNotExist() throws Exception {
        Path file = createTempDir().resolve(randomAlphaOfLength(10));
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        Map<String, String[]> usersRoles = FileUserRolesStore.parseFile(file, logger);
        assertThat(usersRoles, nullValue());
        usersRoles = FileUserRolesStore.parseFileLenient(file, logger);
@@ -199,7 +199,7 @@ public class FileUserRolesStoreTests extends ESTestCase {

        // writing in utf_16 should cause a parsing error as we try to read the file in utf_8
        Files.write(file, lines, StandardCharsets.UTF_16);
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
        try {
            FileUserRolesStore.parseFile(file, logger);
            fail("expected a parse failure");
@@ -256,7 +256,7 @@ public class FileUserRolesStoreTests extends ESTestCase {

        // writing in utf_16 should cause a parsing error as we try to read the file in utf_8
        Files.write(file, lines, StandardCharsets.UTF_16);
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
        Map<String, String[]> usersRoles = FileUserRolesStore.parseFileLenient(file, logger);
        assertThat(usersRoles, notNullValue());
        assertThat(usersRoles.isEmpty(), is(true));
@@ -199,7 +199,7 @@ public class DnRoleMapperTests extends ESTestCase {

    public void testParseFile() throws Exception {
        Path file = getDataPath("role_mapping.yml");
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
        assertThat(mappings, notNullValue());
        assertThat(mappings.size(), is(3));
@@ -229,18 +229,19 @@ public class DnRoleMapperTests extends ESTestCase {
    public void testParseFile_Empty() throws Exception {
        Path file = createTempDir().resolve("foo.yaml");
        Files.createFile(file);
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
        Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
        assertThat(mappings, notNullValue());
        assertThat(mappings.isEmpty(), is(true));
        List<String> events = CapturingLogger.output(logger.getName(), Level.DEBUG);
        assertThat(events.size(), is(1));
        assertThat(events.get(0), containsString("[0] role mappings found"));
        events.clear();
    }

    public void testParseFile_WhenFileDoesNotExist() throws Exception {
        Path file = createTempDir().resolve(randomAlphaOfLength(10));
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
        assertThat(mappings, notNullValue());
        assertThat(mappings.isEmpty(), is(true));
@@ -257,7 +258,7 @@ public class DnRoleMapperTests extends ESTestCase {
        Path file = createTempFile("", ".yml");
        // writing in utf_16 should cause a parsing error as we try to read the file in utf_8
        Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        try {
            DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
            fail("expected a parse failure");
@@ -270,13 +271,14 @@ public class DnRoleMapperTests extends ESTestCase {
        Path file = createTempFile("", ".yml");
        // writing in utf_16 should cause a parsing error as we try to read the file in utf_8
        Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        Map<DN, Set<String>> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name");
        assertThat(mappings, notNullValue());
        assertThat(mappings.isEmpty(), is(true));
        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
        assertThat(events.size(), is(1));
        assertThat(events.get(0), containsString("failed to parse role mappings file"));
        events.clear();
    }

    public void testYaml() throws Exception {
@@ -237,7 +237,9 @@ public class FileRolesStoreTests extends ESTestCase {

    public void testParseFileWithFLSAndDLSDisabled() throws Exception {
        Path path = getDataPath("roles.yml");
        Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR);
        Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR, null);
        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
        events.clear();
        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.builder()
                .put(XPackSettings.DLS_FLS_ENABLED.getKey(), false)
                .build(), new XPackLicenseState(Settings.EMPTY));
@@ -247,7 +249,6 @@ public class FileRolesStoreTests extends ESTestCase {
        assertThat(roles.get("role_query"), nullValue());
        assertThat(roles.get("role_query_fields"), nullValue());

        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
        assertThat(events, hasSize(3));
        assertThat(
            events.get(0),
@@ -263,7 +264,9 @@ public class FileRolesStoreTests extends ESTestCase {

    public void testParseFileWithFLSAndDLSUnlicensed() throws Exception {
        Path path = getDataPath("roles.yml");
        Logger logger = CapturingLogger.newCapturingLogger(Level.WARN);
        Logger logger = CapturingLogger.newCapturingLogger(Level.WARN, null);
        List<String> events = CapturingLogger.output(logger.getName(), Level.WARN);
        events.clear();
        XPackLicenseState licenseState = mock(XPackLicenseState.class);
        when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false);
        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState);
@@ -273,7 +276,6 @@ public class FileRolesStoreTests extends ESTestCase {
        assertNotNull(roles.get("role_query"));
        assertNotNull(roles.get("role_query_fields"));

        List<String> events = CapturingLogger.output(logger.getName(), Level.WARN);
        assertThat(events, hasSize(3));
        assertThat(
            events.get(0),
@@ -369,7 +371,9 @@ public class FileRolesStoreTests extends ESTestCase {

    public void testThatInvalidRoleDefinitions() throws Exception {
        Path path = getDataPath("invalid_roles.yml");
        Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR);
        Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR, null);
        List<String> entries = CapturingLogger.output(logger.getName(), Level.ERROR);
        entries.clear();
        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
        assertThat(roles.size(), is(1));
        assertThat(roles, hasKey("valid_role"));
@@ -379,7 +383,6 @@ public class FileRolesStoreTests extends ESTestCase {
        assertThat(role, notNullValue());
        assertThat(role.names(), equalTo(new String[] { "valid_role" }));

        List<String> entries = CapturingLogger.output(logger.getName(), Level.ERROR);
        assertThat(entries, hasSize(6));
        assertThat(
            entries.get(0),
@@ -395,12 +398,13 @@ public class FileRolesStoreTests extends ESTestCase {

    public void testThatRoleNamesDoesNotResolvePermissions() throws Exception {
        Path path = getDataPath("invalid_roles.yml");
        Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR);
        Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR, null);
        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
        events.clear();
        Set<String> roleNames = FileRolesStore.parseFileForRoleNames(path, logger);
        assertThat(roleNames.size(), is(6));
        assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5"));

        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
        assertThat(events, hasSize(1));
        assertThat(
            events.get(0),
@@ -408,9 +412,9 @@ public class FileRolesStoreTests extends ESTestCase {
    }

    public void testReservedRoles() throws Exception {

        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);

        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
        events.clear();
        Path path = getDataPath("reserved_roles.yml");
        Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY));
        assertThat(roles, notNullValue());
@@ -418,7 +422,6 @@ public class FileRolesStoreTests extends ESTestCase {

        assertThat(roles, hasKey("admin"));

        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
        assertThat(events, notNullValue());
        assertThat(events, hasSize(4));
        // the system role will always be checked first
@@ -40,7 +40,7 @@ subprojects {

    integTestRunner {
        systemProperty 'tests.audit.logfile',
            "${ -> integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }_access.log"
            "${ -> integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }_audit.log"
    }

    runqa {
@@ -248,14 +248,14 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
        final Matcher<String> runByRealmMatcher = realm.equals("default_file") ? Matchers.nullValue(String.class)
                : Matchers.is("default_file");
        logCheckers.add(
            m -> eventType.equals(m.get("event_type"))
            m -> eventType.equals(m.get("event.action"))
                && action.equals(m.get("action"))
                && principal.equals(m.get("principal"))
                && realm.equals(m.get("realm"))
                && runByPrincipalMatcher.matches(m.get("run_by_principal"))
                && runByRealmMatcher.matches(m.get("run_by_realm"))
                && principal.equals(m.get("user.name"))
                && realm.equals(m.get("user.realm"))
                && runByPrincipalMatcher.matches(m.get("user.run_by.name"))
                && runByRealmMatcher.matches(m.get("user.run_by.realm"))
                && indicesMatcher.matches(m.get("indices"))
                && request.equals(m.get("request")));
                && request.equals(m.get("request.name")));
        return this;
    }
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.qa.sql.security;

import org.apache.lucene.util.SuppressForbidden;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
@@ -14,6 +15,7 @@ import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.hamcrest.Matcher;
@@ -32,17 +34,16 @@ import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.regex.Pattern;

import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.hasItems;

public abstract class SqlSecurityTestCase extends ESRestTestCase {
@@ -515,21 +516,22 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
            default:
                throw new IllegalArgumentException("Unknown action [" + action + "]");
        }
        final String eventType = granted ? "access_granted" : "access_denied";
        final String eventAction = granted ? "access_granted" : "access_denied";
        final String realm = principal.equals("test_admin") ? "default_file" : "default_native";
        return expect(eventType, action, principal, realm, indicesMatcher, request);
        return expect(eventAction, action, principal, realm, indicesMatcher, request);
    }

    public AuditLogAsserter expect(String eventType, String action, String principal, String realm,
    public AuditLogAsserter expect(String eventAction, String action, String principal, String realm,
            Matcher<? extends Iterable<? extends String>> indicesMatcher, String request) {
        logCheckers.add(m -> eventType.equals(m.get("event_type"))
        logCheckers.add(m ->
            eventAction.equals(m.get("event.action"))
                && action.equals(m.get("action"))
                && principal.equals(m.get("principal"))
                && realm.equals(m.get("realm"))
                && Matchers.nullValue(String.class).matches(m.get("run_by_principal"))
                && Matchers.nullValue(String.class).matches(m.get("run_by_realm"))
                && principal.equals(m.get("user.name"))
                && realm.equals(m.get("user.realm"))
                && Matchers.nullValue(String.class).matches(m.get("user.run_by.name"))
                && Matchers.nullValue(String.class).matches(m.get("user.run_by.realm"))
                && indicesMatcher.matches(m.get("indices"))
                && request.equals(m.get("request"))
                && request.equals(m.get("request.name"))
        );
        return this;
    }
@@ -554,56 +556,39 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {

        List<Map<String, Object>> logs = new ArrayList<>();
        String line;
        Pattern logPattern = Pattern.compile(
            ("PART PART PART PART origin_type=PART, origin_address=PART, principal=PART, realm=PART, "
                + "(?:run_as_principal=IGN, )?(?:run_as_realm=IGN, )?(?:run_by_principal=PART, )?(?:run_by_realm=PART, )?"
                + "roles=PART, action=\\[(.*?)\\], (?:indices=PART, )?request=PART")
                .replace(" ", "\\s+").replace("PART", "\\[([^\\]]*)\\]").replace("IGN", "\\[[^\\]]*\\]"));
        // fail(logPattern.toString());
        while ((line = logReader.readLine()) != null) {
            java.util.regex.Matcher m = logPattern.matcher(line);
            if (false == m.matches()) {
                throw new IllegalArgumentException("Unrecognized log: " + line);
            try {
                final Map<String, Object> log = XContentHelper.convertToMap(JsonXContent.jsonXContent, line, false);
                if (false == ("access_denied".equals(log.get("event.action"))
                        || "access_granted".equals(log.get("event.action")))) {
                    continue;
                }
                assertThat(log.containsKey("action"), is(true));
                if (false == (SQL_ACTION_NAME.equals(log.get("action")) || GetIndexAction.NAME.equals(log.get("action")))) {
                    // TODO we may want to extend this and the assertions to SearchAction.NAME as well
                    continue;
                }
                assertThat(log.containsKey("user.name"), is(true));
                List<String> indices = new ArrayList<>();
                if (log.containsKey("indices")) {
                    indices = (ArrayList<String>) log.get("indices");
                    if ("test_admin".equals(log.get("user.name"))) {
                        /*
                         * Sometimes we accidentally sneak access to the security tables. This is fine,
                         * SQL drops them from the interface. So we might have access to them, but we
                         * don't show them.
                         */
                        indices.remove(".security");
                        indices.remove(".security-6");
                    }
                }
                // Use a sorted list for indices for consistent error reporting
                Collections.sort(indices);
                log.put("indices", indices);
                logs.add(log);
            } catch (final ElasticsearchParseException e) {
                throw new IllegalArgumentException("Unrecognized log: " + line, e);
            }
            int i = 1;
            Map<String, Object> log = new HashMap<>();
            /* We *could* parse the date but leaving it in the original format makes it
             * easier to find the lines in the file that this log comes from. */
            log.put("time", m.group(i++));
            log.put("node", m.group(i++));
            log.put("origin", m.group(i++));
            String eventType = m.group(i++);
            if (false == ("access_denied".equals(eventType) || "access_granted".equals(eventType))) {
                continue;
            }
            log.put("event_type", eventType);
            log.put("origin_type", m.group(i++));
            log.put("origin_address", m.group(i++));
            String principal = m.group(i++);
            log.put("principal", principal);
            log.put("realm", m.group(i++));
            log.put("run_by_principal", m.group(i++));
            log.put("run_by_realm", m.group(i++));
            log.put("roles", m.group(i++));
            String action = m.group(i++);
            if (false == (SQL_ACTION_NAME.equals(action) || GetIndexAction.NAME.equals(action))) {
                //TODO we may want to extend this and the assertions to SearchAction.NAME as well
                continue;
            }
            log.put("action", action);
            // Use a sorted list for indices for consistent error reporting
            List<String> indices = new ArrayList<>(Strings.tokenizeByCommaToSet(m.group(i++)));
            Collections.sort(indices);
            if ("test_admin".equals(principal)) {
                /* Sometimes we accidentally sneak access to the security tables. This is fine, SQL
                 * drops them from the interface. So we might have access to them, but we don't show
                 * them. */
                indices.remove(".security");
                indices.remove(".security-6");
            }
            log.put("indices", indices);
            log.put("request", m.group(i));
            logs.add(log);
        }
        List<Map<String, Object>> allLogs = new ArrayList<>(logs);
        List<Integer> notMatching = new ArrayList<>();