Merge pull request elastic/elasticsearch#3266 from jasontedor/log4j2

Introduce Log4j 2

Original commit: elastic/x-pack-elasticsearch@b1cee13ac8
Jason Tedor 2016-08-31 23:34:45 -04:00 committed by GitHub
commit d348d4781a
128 changed files with 3308 additions and 971 deletions
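The change is mechanical across these files: the deprecated ESLogger type is replaced by the Log4j 2 Logger, and logging calls that previously took the Throwable before the format arguments now pass the Throwable last, wrapping the parameterized message in a lazily evaluated Supplier. A minimal sketch of the before/after pattern follows; the class name and message text are hypothetical, but the calls mirror the ones in the diff below.

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.logging.Loggers;

// Hypothetical class illustrating the two recurring changes in this commit.
public class Log4j2MigrationExample {

    // Before: private static final ESLogger logger = Loggers.getLogger(Log4j2MigrationExample.class);
    // After: the field type (and the Loggers factory return type) is the Log4j 2 Logger.
    private static final Logger logger = Loggers.getLogger(Log4j2MigrationExample.class);

    void onCloseFailure(String exporterName, Exception e) {
        // Before (ESLogger): the Throwable came before the format arguments:
        //     logger.error("failed to close exporter [{}]", e, exporterName);
        // After (Log4j 2): the Throwable is the last argument, and the message is wrapped
        // in a Supplier of ParameterizedMessage so it is only rendered when ERROR is enabled.
        logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to close exporter [{}]", exporterName), e);
    }
}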

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.security;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
@ -60,7 +60,7 @@ public abstract class MigrateToolTestCase extends LuceneTestCase {
*/
public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300";
protected static final ESLogger logger = ESLoggerFactory.getLogger(MigrateToolTestCase.class.getName());
protected static final Logger logger = ESLoggerFactory.getLogger(MigrateToolTestCase.class.getName());
private static final AtomicInteger counter = new AtomicInteger();
private static Client client;

View File

@ -5,16 +5,18 @@
*/
package org.elasticsearch.license;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.component.Lifecycle;
@ -300,7 +302,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
@Override
public void onFailure(String source, @Nullable Exception e) {
logger.error("unexpected failure during [{}]", e, source);
logger.error((Supplier<?>) () -> new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
});

View File

@ -6,7 +6,9 @@
package org.elasticsearch.license;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.license.License.OperationMode;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
@ -32,11 +34,11 @@ public final class OperationModeFileWatcher implements FileChangesListener {
private final AtomicBoolean initialized = new AtomicBoolean();
private final OperationMode defaultOperationMode = OperationMode.PLATINUM;
private volatile OperationMode currentOperationMode = defaultOperationMode;
private final ESLogger logger;
private final Logger logger;
private final Runnable onChange;
public OperationModeFileWatcher(ResourceWatcherService resourceWatcherService, Path licenseModePath,
ESLogger logger, Runnable onChange) {
Logger logger, Runnable onChange) {
this.resourceWatcherService = resourceWatcherService;
this.licenseModePath = licenseModePath;
this.logger = logger;
@ -95,14 +97,18 @@ public final class OperationModeFileWatcher implements FileChangesListener {
try {
content = Files.readAllBytes(licenseModePath);
} catch (IOException e) {
logger.error("couldn't read operation mode from [{}]", e, licenseModePath.toAbsolutePath().toString());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"couldn't read operation mode from [{}]", licenseModePath.toAbsolutePath()), e);
return;
}
String operationMode = new String(content, StandardCharsets.UTF_8);
try {
currentOperationMode = OperationMode.resolve(operationMode);
} catch (IllegalArgumentException e) {
logger.error("invalid operation mode in [{}]", e, licenseModePath.toAbsolutePath().toString());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"invalid operation mode in [{}]", licenseModePath.toAbsolutePath()), e);
return;
}
}

View File

@ -5,9 +5,10 @@
*/
package org.elasticsearch.xpack.monitoring.agent;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings;
@ -157,7 +158,7 @@ public class AgentService extends AbstractLifecycleComponent {
try {
exporter.close();
} catch (Exception e) {
logger.error("failed to close exporter [{}]", e, exporter.name());
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e);
}
}
}

View File

@ -5,12 +5,13 @@
*/
package org.elasticsearch.xpack.monitoring.agent.collector;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
@ -81,7 +82,7 @@ public abstract class AbstractCollector extends AbstractLifecycleComponent imple
} catch (ElasticsearchTimeoutException e) {
logger.error("collector [{}] timed out when collecting data", name());
} catch (Exception e) {
logger.error("collector [{}] - failed collecting data", e, name());
logger.error((Supplier<?>) () -> new ParameterizedMessage("collector [{}] - failed collecting data", name()), e);
}
return null;
}

View File

@ -5,11 +5,8 @@
*/
package org.elasticsearch.xpack.monitoring.agent.collector.cluster;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
@ -25,6 +22,11 @@ import org.elasticsearch.xpack.monitoring.agent.collector.AbstractCollector;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
import org.elasticsearch.xpack.security.InternalClient;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* Collector for cluster stats.
* <p>
@ -66,7 +68,9 @@ public class ClusterStatsCollector extends AbstractCollector {
clusterStats = client.admin().cluster().prepareClusterStats().get(monitoringSettings.clusterStatsTimeout());
} catch (ElasticsearchSecurityException e) {
if (LicenseUtils.isLicenseExpiredException(e)) {
logger.trace("collector [{}] - unable to collect data because of expired license", e, name());
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"collector [{}] - unable to collect data because of expired license", name()), e);
} else {
throw e;
}

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.monitoring.agent.exporter;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@ -34,7 +34,7 @@ public abstract class Exporter implements AutoCloseable {
public static final String USE_INGEST_PIPELINE_SETTING = "use_ingest";
protected final Config config;
protected final ESLogger logger;
protected final Logger logger;
@Nullable protected final TimeValue bulkTimeout;
@ -146,7 +146,7 @@ public abstract class Exporter implements AutoCloseable {
return settings;
}
public ESLogger logger(Class clazz) {
public Logger logger(Class clazz) {
return Loggers.getLogger(clazz, globalSettings, name);
}
}

View File

@ -5,10 +5,12 @@
*/
package org.elasticsearch.xpack.monitoring.agent.exporter;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.node.Node;
@ -81,12 +83,12 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
return exporters.get().values().iterator();
}
static void closeExporters(ESLogger logger, Map<String, Exporter> exporters) {
static void closeExporters(Logger logger, Map<String, Exporter> exporters) {
for (Exporter exporter : exporters.values()) {
try {
exporter.close();
} catch (Exception e) {
logger.error("failed to close exporter [{}]", e, exporter.name());
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e);
}
}
}
@ -107,7 +109,8 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
bulks.add(bulk);
}
} catch (Exception e) {
logger.error("exporter [{}] failed to open exporting bulk", e, exporter.name());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage("exporter [{}] failed to open exporting bulk", exporter.name()), e);
}
}
return bulks.isEmpty() ? null : new ExportBulk.Compound(bulks);

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.monitoring.agent.exporter.http;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
@ -336,7 +338,7 @@ public class HttpExporter extends Exporter {
doc.getClass().getName(), doc.getMonitoringId(), doc.getMonitoringVersion());
}
} catch (Exception e) {
logger.warn("failed to render document [{}], skipping it", e, doc);
logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to render document [{}], skipping it", doc), e);
}
}
@ -399,7 +401,9 @@ public class HttpExporter extends Exporter {
continue;
}
} catch (ElasticsearchException e) {
logger.error("exception when checking remote cluster version on host [{}]", e, host);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"exception when checking remote cluster version on host [{}]", host), e);
continue;
}
}
@ -497,9 +501,9 @@ public class HttpExporter extends Exporter {
return conn;
} catch (URISyntaxException e) {
logger.error("error parsing host [{}]", e, host);
logger.error((Supplier<?>) () -> new ParameterizedMessage("error parsing host [{}]", host), e);
} catch (IOException e) {
logger.error("error connecting to [{}]", e, host);
logger.error((Supplier<?>) () -> new ParameterizedMessage("error connecting to [{}]", host), e);
}
return null;
}
@ -568,7 +572,9 @@ public class HttpExporter extends Exporter {
return true;
}
} catch (Exception e) {
logger.error("failed to verify the monitoring pipeline [{}] on [{}]", e, EXPORT_PIPELINE_NAME, host);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to verify the monitoring pipeline [{}] on [{}]", EXPORT_PIPELINE_NAME, host), e);
return false;
} finally {
if (connection != null) {
@ -604,7 +610,9 @@ public class HttpExporter extends Exporter {
logger.info("monitoring pipeline [{}] set", EXPORT_PIPELINE_NAME);
return true;
} catch (IOException e) {
logger.error("failed to update monitoring pipeline [{}] on host [{}]", e, EXPORT_PIPELINE_NAME, host);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to update monitoring pipeline [{}] on host [{}]", EXPORT_PIPELINE_NAME, host), e);
return false;
} finally {
if (connection != null) {
@ -658,7 +666,9 @@ public class HttpExporter extends Exporter {
return true;
}
} catch (Exception e) {
logger.error("failed to verify the monitoring template [{}] on [{}]", e, templateName, host);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to verify the monitoring template [{}] on [{}]", templateName, host), e);
return false;
} finally {
if (connection != null) {
@ -692,7 +702,9 @@ public class HttpExporter extends Exporter {
logger.info("monitoring template [{}] updated ", template);
return true;
} catch (IOException e) {
logger.error("failed to update monitoring template [{}] on host [{}]", e, template, host);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to update monitoring template [{}] on host [{}]", template, host), e);
return false;
} finally {
if (connection != null) {

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.monitoring.agent.exporter.local;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.agent.exporter.ExportBulk;
import org.elasticsearch.xpack.monitoring.agent.exporter.ExportException;
@ -29,7 +29,7 @@ import static org.elasticsearch.xpack.monitoring.agent.exporter.Exporter.EXPORT_
*/
public class LocalBulk extends ExportBulk {
private final ESLogger logger;
private final Logger logger;
private final InternalClient client;
private final ResolversRegistry resolvers;
private final boolean usePipeline;
@ -37,7 +37,7 @@ public class LocalBulk extends ExportBulk {
private BulkRequestBuilder requestBuilder;
public LocalBulk(String name, ESLogger logger, InternalClient client, ResolversRegistry resolvers, boolean usePipeline) {
public LocalBulk(String name, Logger logger, InternalClient client, ResolversRegistry resolvers, boolean usePipeline) {
super(name);
this.logger = logger;
this.client = client;

View File

@ -7,6 +7,8 @@ package org.elasticsearch.xpack.monitoring.agent.exporter.local;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
@ -21,14 +23,12 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.xpack.common.init.proxy.ClientProxy;
import org.elasticsearch.xpack.monitoring.agent.exporter.ExportBulk;
import org.elasticsearch.xpack.monitoring.agent.exporter.Exporter;
import org.elasticsearch.xpack.monitoring.agent.exporter.MonitoringDoc;
@ -387,7 +387,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
@Override
public void onFailure(Exception e) {
logger.error("failed to set monitoring index {} [{}]", e, type, name);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to set monitoring index {} [{}]", type, name), e);
}
}
}

View File

@ -0,0 +1,15 @@
appender.audit_rolling.type = RollingFile
appender.audit_rolling.name = audit_rolling
appender.audit_rolling.fileName = ${sys:es.logs}_access.log
appender.audit_rolling.layout.type = PatternLayout
appender.audit_rolling.layout.pattern = [%d{ISO8601}] %m%n
appender.audit_rolling.filePattern = ${sys:es.logs}-%d{yyyy-MM-dd}.log
appender.audit_rolling.policies.type = Policies
appender.audit_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.audit_rolling.policies.time.interval = 1
appender.audit_rolling.policies.time.modulate = true
logger.xpack_security_audit_logfile.name = xpack.security.audit.logfile
logger.xpack_security_audit_logfile.level = info
logger.xpack_security_audit_logfile.appenderRef.audit_rolling.ref = audit_rolling
logger.xpack_security_audit_logfile.additivity = false
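This Log4j 2 properties file replaces the Log4j 1.x YAML configuration removed in the next file. As a rough sketch of how it routes events (the logger name is taken from the config above; the message text is made up and is not the actual audit trail format), anything logged under that name goes only to the audit_rolling appender, is rendered by the PatternLayout as "[<ISO8601 timestamp>] <message>" into ${sys:es.logs}_access.log, and rolls daily via the TimeBasedTriggeringPolicy:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Hypothetical snippet: resolve the logger configured above by name and emit one event.
public class AuditAppenderExample {
    public static void main(String[] args) {
        Logger audit = LogManager.getLogger("xpack.security.audit.logfile");
        // additivity = false keeps the event out of the root logger's appenders,
        // so it lands only in the rolling access log defined above.
        audit.info("example audit event");
    }
}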

View File

@ -1,15 +0,0 @@
logger:
  xpack.security.audit.logfile: INFO, access_log

additivity:
  xpack.security.audit.logfile: false

appender:
  access_log:
    type: dailyRollingFile
    file: ${path.logs}/${cluster.name}-access.log
    datePattern: "'.'yyyy-MM-dd"
    layout:
      type: pattern
      conversionPattern: "[%d{ISO8601}] %m%n"

View File

@ -5,20 +5,7 @@
*/
package org.elasticsearch.xpack.security;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilter;
@ -27,7 +14,6 @@ import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
@ -76,8 +62,8 @@ import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail;
import org.elasticsearch.xpack.security.audit.index.IndexNameResolver;
import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail;
import org.elasticsearch.xpack.security.authc.AuthenticationFailureHandler;
import org.elasticsearch.xpack.security.authc.DefaultAuthenticationFailureHandler;
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authc.DefaultAuthenticationFailureHandler;
import org.elasticsearch.xpack.security.authc.Realm;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.xpack.security.authc.activedirectory.ActiveDirectoryRealm;
@ -90,10 +76,10 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory;
import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache;
import org.elasticsearch.xpack.security.authz.accesscontrol.SecurityIndexSearcherWrapper;
import org.elasticsearch.xpack.security.authz.accesscontrol.SetSecurityUserProcessor;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
@ -123,6 +109,20 @@ import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
@ -131,7 +131,7 @@ import static java.util.Collections.singletonList;
*/
public class Security implements ActionPlugin, IngestPlugin {
private static final ESLogger logger = Loggers.getLogger(XPackPlugin.class);
private static final Logger logger = Loggers.getLogger(XPackPlugin.class);
public static final String NAME3 = XPackPlugin.SECURITY + "3";
public static final String NAME4 = XPackPlugin.SECURITY + "4";

View File

@ -5,9 +5,7 @@
*/
package org.elasticsearch.xpack.security;
import java.io.IOException;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@ -17,12 +15,14 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.elasticsearch.xpack.security.user.User;
import java.io.IOException;
/**
* A lightweight utility that can find the current user and authentication information for the local thread.
*/
public class SecurityContext {
private final ESLogger logger;
private final Logger logger;
private final ThreadContext threadContext;
private final CryptoService cryptoService;
private final boolean signUserHeader;

View File

@ -5,6 +5,9 @@
*/
package org.elasticsearch.xpack.security;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
@ -22,7 +25,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -146,7 +148,7 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
@Override
public void onFailure(Exception e) {
updateMappingPending.set(false);
logger.warn("failed to update mapping for [{}] on security index", e, type);
logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to update mapping for [{}] on security index", type), e);
}
});
}
@ -175,7 +177,7 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
});
}
static boolean securityIndexMappingUpToDate(ClusterState clusterState, ESLogger logger) {
static boolean securityIndexMappingUpToDate(ClusterState clusterState, Logger logger) {
IndexMetaData indexMetaData = clusterState.metaData().getIndices().get(SECURITY_INDEX_NAME);
if (indexMetaData != null) {
for (Object object : indexMetaData.getMappings().values().toArray()) {
@ -199,7 +201,7 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
}
}
static boolean securityTemplateExistsAndIsUpToDate(ClusterState state, ESLogger logger) {
static boolean securityTemplateExistsAndIsUpToDate(ClusterState state, Logger logger) {
IndexTemplateMetaData templateMeta = state.metaData().templates().get(SECURITY_TEMPLATE_NAME);
if (templateMeta == null) {
return false;
@ -243,7 +245,7 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
return true;
}
public static boolean securityIndexMappingAndTemplateUpToDate(ClusterState clusterState, ESLogger logger) {
public static boolean securityIndexMappingAndTemplateUpToDate(ClusterState clusterState, Logger logger) {
if (SecurityTemplateService.securityTemplateExistsAndIsUpToDate(clusterState, logger) == false) {
logger.debug("security template [{}] does not exist or is not up to date, so service cannot start",
SecurityTemplateService.SECURITY_TEMPLATE_NAME);

View File

@ -5,16 +5,18 @@
*/
package org.elasticsearch.xpack.security.action.role;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore;
public class TransportDeleteRoleAction extends HandledTransportAction<DeleteRoleRequest, DeleteRoleResponse> {
@ -49,7 +51,7 @@ public class TransportDeleteRoleAction extends HandledTransportAction<DeleteRole
}
});
} catch (Exception e) {
logger.error("failed to delete role [{}]", e, request.name());
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to delete role [{}]", request.name()), e);
listener.onFailure(e);
}
}

View File

@ -5,23 +5,26 @@
*/
package org.elasticsearch.xpack.security.action.role;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.KibanaRole;
import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.KibanaRole;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.Strings.arrayToDelimitedString;
public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequest, GetRolesResponse> {
private final NativeRolesStore nativeRolesStore;
@ -78,7 +81,7 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
@Override
public void onFailure(Exception t) {
logger.error("failed to retrieve role [{}]", t, rolename);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve role [{}]", rolename), t);
listener.onFailure(t);
}
});
@ -96,8 +99,9 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
@Override
public void onFailure(Exception t) {
logger.error("failed to retrieve role [{}]", t,
Strings.arrayToDelimitedString(request.names(), ","));
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to retrieve role [{}]", arrayToDelimitedString(request.names(), ",")), t);
listener.onFailure(t);
}
});

View File

@ -5,24 +5,27 @@
*/
package org.elasticsearch.xpack.security.action.user;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.Strings.arrayToDelimitedString;
public class TransportGetUsersAction extends HandledTransportAction<GetUsersRequest, GetUsersResponse> {
private final NativeUsersStore usersStore;
@ -79,7 +82,7 @@ public class TransportGetUsersAction extends HandledTransportAction<GetUsersRequ
@Override
public void onFailure(Exception e) {
logger.error("failed to retrieve user [{}]", e, username);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", username), e);
listener.onFailure(e);
}
});
@ -95,8 +98,9 @@ public class TransportGetUsersAction extends HandledTransportAction<GetUsersRequ
@Override
public void onFailure(Exception e) {
logger.error("failed to retrieve user [{}]", e,
Strings.arrayToDelimitedString(request.usernames(), ","));
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to retrieve user [{}]", arrayToDelimitedString(request.usernames(), ",")), e);
listener.onFailure(e);
}
});

View File

@ -5,18 +5,20 @@
*/
package org.elasticsearch.xpack.security.action.user;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
public class TransportPutUserAction extends HandledTransportAction<PutUserRequest, PutUserResponse> {
@ -60,7 +62,7 @@ public class TransportPutUserAction extends HandledTransportAction<PutUserReques
@Override
public void onFailure(Exception e) {
logger.error("failed to put user [{}]", e, request.username());
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put user [{}]", request.username()), e);
listener.onFailure(e);
}
});

View File

@ -5,6 +5,9 @@
*/
package org.elasticsearch.xpack.security.audit.index;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
@ -19,17 +22,16 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@ -45,18 +47,18 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.node.Node;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.XPackTransportClient;
import org.elasticsearch.xpack.security.InternalClient;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.user.XPackUser;
import org.elasticsearch.xpack.security.audit.AuditTrail;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.authz.privilege.SystemPrivilege;
import org.elasticsearch.xpack.security.rest.RemoteHostHeader;
import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.XPackTransportClient;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.user.XPackUser;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -82,6 +84,7 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;
import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.security.audit.AuditUtil.indices;
import static org.elasticsearch.xpack.security.audit.AuditUtil.restRequestContent;
import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.ACCESS_DENIED;
@ -96,7 +99,6 @@ import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.SYSTE
import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.TAMPERED_REQUEST;
import static org.elasticsearch.xpack.security.audit.index.IndexAuditLevel.parse;
import static org.elasticsearch.xpack.security.audit.index.IndexNameResolver.resolve;
import static org.elasticsearch.xpack.security.Security.setting;
/**
* Audit trail implementation that writes events into an index.
@ -188,8 +190,12 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
try {
events = parse(includedEvents, excludedEvents);
} catch (IllegalArgumentException e) {
logger.warn("invalid event type specified, using default for audit index output. include events [{}], exclude events [{}]",
e, includedEvents, excludedEvents);
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage(
"invalid event type specified, using default for audit index output. include events [{}], exclude events [{}]",
includedEvents,
excludedEvents),
e);
events = parse(DEFAULT_EVENT_INCLUDES, Collections.emptyList());
}
this.indexToRemoteCluster = REMOTE_CLIENT_SETTINGS.get(settings).names().size() > 0;
@ -706,7 +712,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
return eventQueue.peek();
}
private static Client initializeRemoteClient(Settings settings, ESLogger logger) {
private static Client initializeRemoteClient(Settings settings, Logger logger) {
Settings clientSettings = REMOTE_CLIENT_SETTINGS.get(settings);
String[] hosts = clientSettings.getAsArray("hosts");
if (hosts.length == 0) {
@ -842,7 +848,9 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
@Override
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
logger.error("failed to bulk index audit events: [{}]", failure, failure.getMessage());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to bulk index audit events: [{}]", failure.getMessage()), failure);
}
}).setBulkActions(bulkSize)
.setFlushInterval(interval)
@ -866,8 +874,9 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
INDEX_TEMPLATE_NAME);
threadPool.generic().execute(new AbstractRunnable() {
@Override
public void onFailure(Exception throwable) {
logger.error("failed to update security audit index template [{}]", throwable, INDEX_TEMPLATE_NAME);
public void onFailure(Exception e) {
logger.error((Supplier<?>) () -> new ParameterizedMessage(
"failed to update security audit index template [{}]", INDEX_TEMPLATE_NAME), e);
}
@Override

View File

@ -5,10 +5,10 @@
*/
package org.elasticsearch.xpack.security.audit.logfile;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Setting;
@ -18,16 +18,16 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.user.XPackUser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.audit.AuditTrail;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.authz.privilege.SystemPrivilege;
import org.elasticsearch.xpack.security.rest.RemoteHostHeader;
import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.user.XPackUser;
import java.net.InetAddress;
import java.net.InetSocketAddress;
@ -36,9 +36,9 @@ import java.util.List;
import java.util.Set;
import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString;
import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.security.audit.AuditUtil.indices;
import static org.elasticsearch.xpack.security.audit.AuditUtil.restRequestContent;
import static org.elasticsearch.xpack.security.Security.setting;
/**
*
@ -53,7 +53,7 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail {
public static final Setting<Boolean> NODE_NAME_SETTING =
Setting.boolSetting(setting("audit.logfile.prefix.emit_node_name"), true, Property.NodeScope);
private final ESLogger logger;
private final Logger logger;
private final ClusterService clusterService;
private final ThreadContext threadContext;
@ -68,7 +68,7 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail {
this(settings, clusterService, Loggers.getLogger(LoggingAuditTrail.class), threadPool.getThreadContext());
}
LoggingAuditTrail(Settings settings, ClusterService clusterService, ESLogger logger, ThreadContext threadContext) {
LoggingAuditTrail(Settings settings, ClusterService clusterService, Logger logger, ThreadContext threadContext) {
super(settings);
this.logger = logger;
this.clusterService = clusterService;

View File

@ -5,9 +5,8 @@
*/
package org.elasticsearch.xpack.security.authc;
import java.io.IOException;
import java.util.List;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
@ -18,14 +17,17 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.node.Node;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.xpack.security.audit.AuditTrailService;
import org.elasticsearch.xpack.security.authc.Authentication.RealmRef;
import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.audit.AuditTrail;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.audit.AuditTrail;
import org.elasticsearch.xpack.security.audit.AuditTrailService;
import org.elasticsearch.xpack.security.authc.Authentication.RealmRef;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.elasticsearch.xpack.security.user.User;
import java.io.IOException;
import java.util.List;
import static org.elasticsearch.xpack.security.Security.setting;
@ -188,7 +190,7 @@ public class AuthenticationService extends AbstractComponent {
}
} catch (Exception e) {
if (logger.isDebugEnabled()) {
logger.debug("failed to extract token from request: [{}]", e, request);
logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to extract token from request: [{}]", request), e);
} else {
logger.warn("failed to extract token from request: [{}]: {}", request, e.getMessage());
}
@ -228,7 +230,9 @@ public class AuthenticationService extends AbstractComponent {
}
}
} catch (Exception e) {
logger.debug("authentication failed for principal [{}], [{}] ", e, token.principal(), request);
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"authentication failed for principal [{}], [{}] ", token.principal(), request), e);
throw request.exceptionProcessingRequest(e, token);
} finally {
token.clearCredentials();
@ -282,7 +286,12 @@ public class AuthenticationService extends AbstractComponent {
// authorization error
user = new User(user.principal(), user.roles(), new User(runAsUsername, Strings.EMPTY_ARRAY));
} catch (Exception e) {
logger.debug("run as failed for principal [{}], [{}], run as username [{}]", e, token.principal(), request, runAsUsername);
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage("run as failed for principal [{}], [{}], run as username [{}]",
token.principal(),
request,
runAsUsername),
e);
throw request.exceptionProcessingRequest(e, token);
}
return user;

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.security.authc;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.xpack.security.user.User;
@ -19,7 +19,7 @@ import java.util.Map;
*/
public abstract class Realm implements Comparable<Realm> {
protected final ESLogger logger;
protected final Logger logger;
protected final String type;
protected RealmConfig config;

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.security.authc;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@ -56,7 +56,7 @@ public class RealmConfig {
return globalSettings;
}
public ESLogger logger(Class clazz) {
public Logger logger(Class clazz) {
return Loggers.getLogger(clazz, globalSettings);
}

View File

@ -13,7 +13,9 @@ import com.unboundid.ldap.sdk.SearchRequest;
import com.unboundid.ldap.sdk.SearchResult;
import com.unboundid.ldap.sdk.SearchResultEntry;
import com.unboundid.ldap.sdk.SearchScope;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope;
@ -42,7 +44,7 @@ public class ActiveDirectoryGroupsResolver implements GroupsResolver {
}
@Override
public List<String> resolve(LDAPInterface connection, String userDn, TimeValue timeout, ESLogger logger,
public List<String> resolve(LDAPInterface connection, String userDn, TimeValue timeout, Logger logger,
Collection<Attribute> attributes) {
Filter groupSearchFilter = buildGroupQuery(connection, userDn, timeout, logger);
logger.debug("group SID to DN search filter: [{}]", groupSearchFilter);
@ -56,7 +58,7 @@ public class ActiveDirectoryGroupsResolver implements GroupsResolver {
try {
results = search(connection, searchRequest, logger);
} catch (LDAPException e) {
logger.error("failed to fetch AD groups for DN [{}]", e, userDn);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to fetch AD groups for DN [{}]", userDn), e);
return Collections.emptyList();
}
@ -76,7 +78,7 @@ public class ActiveDirectoryGroupsResolver implements GroupsResolver {
return null;
}
static Filter buildGroupQuery(LDAPInterface connection, String userDn, TimeValue timeout, ESLogger logger) {
static Filter buildGroupQuery(LDAPInterface connection, String userDn, TimeValue timeout, Logger logger) {
try {
SearchRequest request = new SearchRequest(userDn, SearchScope.BASE, OBJECT_CLASS_PRESENCE_FILTER, "tokenGroups");
request.setTimeLimitSeconds(Math.toIntExact(timeout.seconds()));
@ -92,7 +94,7 @@ public class ActiveDirectoryGroupsResolver implements GroupsResolver {
}
return Filter.createORFilter(orFilters);
} catch (LDAPException e) {
logger.error("failed to fetch AD groups for DN [{}]", e, userDn);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to fetch AD groups for DN [{}]", userDn), e);
return null;
}
}

View File

@ -11,10 +11,10 @@ import com.unboundid.ldap.sdk.LDAPConnectionOptions;
import com.unboundid.ldap.sdk.LDAPException;
import com.unboundid.ldap.sdk.SearchRequest;
import com.unboundid.ldap.sdk.SearchResult;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.RealmConfig;
@ -106,12 +106,12 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
abstract static class ADAuthenticator {
final TimeValue timeout;
final ESLogger logger;
final Logger logger;
final GroupsResolver groupsResolver;
final String userSearchDN;
final LdapSearchScope userSearchScope;
ADAuthenticator(Settings settings, TimeValue timeout, ESLogger logger, GroupsResolver groupsResolver, String domainDN) {
ADAuthenticator(Settings settings, TimeValue timeout, Logger logger, GroupsResolver groupsResolver, String domainDN) {
this.timeout = timeout;
this.logger = logger;
this.groupsResolver = groupsResolver;
@ -161,7 +161,7 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
final String userSearchFilter;
final String domainName;
DefaultADAuthenticator(Settings settings, TimeValue timeout, ESLogger logger, GroupsResolver groupsResolver, String domainDN) {
DefaultADAuthenticator(Settings settings, TimeValue timeout, Logger logger, GroupsResolver groupsResolver, String domainDN) {
super(settings, timeout, logger, groupsResolver, domainDN);
domainName = settings.get(AD_DOMAIN_NAME_SETTING);
userSearchFilter = settings.get(AD_USER_SEARCH_FILTER_SETTING, "(&(objectClass=user)(|(sAMAccountName={0})" +
@ -190,7 +190,7 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
final String domainDN;
final Settings settings;
DownLevelADAuthenticator(Settings settings, TimeValue timeout, ESLogger logger, GroupsResolver groupsResolver, String domainDN) {
DownLevelADAuthenticator(Settings settings, TimeValue timeout, Logger logger, GroupsResolver groupsResolver, String domainDN) {
super(settings, timeout, logger, groupsResolver, domainDN);
this.domainDN = domainDN;
this.settings = settings;
@ -271,7 +271,7 @@ public class ActiveDirectorySessionFactory extends SessionFactory {
private static final String UPN_USER_FILTER = "(&(objectClass=user)(|(sAMAccountName={0})(userPrincipalName={1})))";
UpnADAuthenticator(Settings settings, TimeValue timeout, ESLogger logger, GroupsResolver groupsResolver, String domainDN) {
UpnADAuthenticator(Settings settings, TimeValue timeout, Logger logger, GroupsResolver groupsResolver, String domainDN) {
super(settings, timeout, logger, groupsResolver, domainDN);
}

View File

@ -9,7 +9,8 @@ import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.ObjectLongMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
@ -47,6 +48,9 @@ import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Cancellable;
import org.elasticsearch.threadpool.ThreadPool.Names;
import org.elasticsearch.xpack.security.InternalClient;
import org.elasticsearch.xpack.security.SecurityTemplateService;
import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheRequest;
@ -60,9 +64,6 @@ import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.user.SystemUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.user.User.Fields;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Cancellable;
import org.elasticsearch.threadpool.ThreadPool.Names;
import java.util.ArrayList;
import java.util.Arrays;
@ -164,7 +165,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
// we call the response with a null user
listener.onResponse(null);
} else {
logger.debug("failed to retrieve user [{}]", t, username);
logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", username), t);
listener.onFailure(t);
}
}
@ -243,7 +244,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
}
});
} catch (Exception e) {
logger.error("unable to retrieve users {}", e, Arrays.toString(usernames));
logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to retrieve users {}", Arrays.toString(usernames)), e);
listener.onFailure(e);
}
}
@ -260,9 +261,11 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
@Override
public void onFailure(Exception t) {
if (t instanceof IndexNotFoundException) {
logger.trace("failed to retrieve user [{}] since security index does not exist", t, username);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to retrieve user [{}] since security index does not exist", username), t);
} else {
logger.error("failed to retrieve user [{}]", t, username);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", username), t);
}
}
}, latch));
@ -287,9 +290,11 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
@Override
public void onFailure(Exception t) {
if (t instanceof IndexNotFoundException) {
logger.trace("could not retrieve user [{}] because security index does not exist", t, user);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"could not retrieve user [{}] because security index does not exist", user), t);
} else {
logger.error("failed to retrieve user [{}]", t, user);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", user), t);
}
// We don't invoke the onFailure listener here, instead
// we call the response with a null user
@ -300,7 +305,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
logger.trace("could not retrieve user [{}] because security index does not exist", user);
listener.onResponse(null);
} catch (Exception e) {
logger.error("unable to retrieve user [{}]", e, user);
logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to retrieve user [{}]", user), e);
listener.onFailure(e);
}
}
@ -346,7 +351,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
if (docType.equals(RESERVED_USER_DOC_TYPE)) {
createReservedUser(username, request.passwordHash(), request.getRefreshPolicy(), listener);
} else {
logger.debug("failed to change password for user [{}]", cause, request.username());
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to change password for user [{}]", request.username()), cause);
ValidationException validationException = new ValidationException();
validationException.addValidationError("user must exist in order to change password");
listener.onFailure(validationException);
@ -385,7 +392,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
indexUser(request, listener);
}
} catch (Exception e) {
logger.error("unable to put user [{}]", e, request.username());
logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to put user [{}]", request.username()), e);
listener.onFailure(e);
}
}
@ -421,7 +428,11 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
// if the index doesn't exist we can never update a user
// if the document doesn't exist, then this update is not valid
logger.debug("failed to update user document with username [{}]", cause, putUserRequest.username());
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to update user document with username [{}]",
putUserRequest.username()),
cause);
ValidationException validationException = new ValidationException();
validationException.addValidationError("password must be specified unless you are updating an existing user");
listener.onFailure(validationException);
@ -614,9 +625,15 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
@Override
public void onFailure(Exception e) {
if (e instanceof IndexNotFoundException) {
logger.trace("could not retrieve built in user [{}] password since security index does not exist", e, username);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"could not retrieve built in user [{}] password since security index does not exist",
username),
e);
} else {
logger.error("failed to retrieve built in user [{}] password", e, username);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to retrieve built in user [{}] password", username), e);
failure.set(e);
}
}
@ -650,7 +667,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
@Override
public void onFailure(Exception t) {
// Not really much to do here except for warn about it...
logger.warn("failed to clear scroll [{}]", t, scrollId);
logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to clear scroll [{}]", scrollId), t);
}
});
}
@ -667,7 +684,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
@Override
public void onFailure(Exception e) {
logger.error("unable to clear realm cache for user [{}]", e, username);
logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to clear realm cache for user [{}]", username), e);
ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + username
+ "] failed. please clear the realm cache manually", e);
listener.onFailure(exception);
@ -717,7 +734,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
Map<String, Object> metadata = (Map<String, Object>) sourceMap.get(User.Fields.METADATA.getPreferredName());
return new UserAndPassword(new User(username, roles, fullName, email, metadata), password.toCharArray());
} catch (Exception e) {
logger.error("error in the format of data for user [{}]", e, username);
logger.error((Supplier<?>) () -> new ParameterizedMessage("error in the format of data for user [{}]", username), e);
return null;
}
}
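
Aside (illustrative only; not part of the commit): the recurring change in the hunks above replaces the old ESLogger calls, which took the Throwable before the message parameters, with Log4j 2 calls that take a lazily evaluated Supplier of a ParameterizedMessage plus the Throwable as the last argument. A minimal sketch of the pattern, with a hypothetical class and message:

// Sketch of the logging-call migration pattern; names are illustrative, not from the diff.
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

public class LoggingMigrationSketch {
    private static final Logger logger = LogManager.getLogger(LoggingMigrationSketch.class);

    public static void main(String[] args) {
        final String username = "some-user";
        final Exception failure = new RuntimeException("simulated failure");
        // Old ESLogger form (removed throughout this commit): the Throwable came before the parameters.
        //     logger.error("failed to retrieve user [{}]", failure, username);
        // Log4j 2 form (added throughout this commit): the message is built lazily, so the
        // ParameterizedMessage is only constructed when ERROR is enabled; the Throwable is last.
        // The explicit cast selects the Supplier<?> overload and avoids ambiguity with the
        // MessageSupplier overload.
        logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", username), failure);
    }
}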

View File

@ -5,7 +5,8 @@
*/
package org.elasticsearch.xpack.security.authc.esnative;
import org.elasticsearch.common.inject.Inject;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.security.authc.RealmConfig;
@ -16,9 +17,9 @@ import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.support.Exceptions;
import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.elasticsearch.xpack.security.user.ElasticUser;
import org.elasticsearch.xpack.security.user.KibanaUser;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.user.ElasticUser;
import java.util.Arrays;
import java.util.Collection;
@ -131,7 +132,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
}
return passwordHash;
} catch (Exception e) {
logger.error("failed to retrieve password hash for reserved user [{}]", e, username);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage("failed to retrieve password hash for reserved user [{}]", username), e);
return null;
}
}

View File

@ -5,10 +5,16 @@
*/
package org.elasticsearch.xpack.security.authc.file;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
@ -16,10 +22,6 @@ import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.support.NoOpLogger;
import org.elasticsearch.xpack.security.support.Validation;
import org.elasticsearch.xpack.security.support.Validation.Users;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import java.io.IOException;
import java.io.PrintWriter;
@ -38,7 +40,7 @@ import static org.elasticsearch.xpack.security.support.SecurityFiles.openAtomicM
public class FileUserPasswdStore {
private final ESLogger logger;
private final Logger logger;
private final Path file;
final Hasher hasher = Hasher.BCRYPT;
@ -97,11 +99,13 @@ public class FileUserPasswdStore {
* Internally in this class, we try to load the file, but if for some reason we can't, we're being more lenient by
* logging the error and skipping all users. This is aligned with how we handle other auto-loaded files in security.
*/
static Map<String, char[]> parseFileLenient(Path path, ESLogger logger) {
static Map<String, char[]> parseFileLenient(Path path, Logger logger) {
try {
return parseFile(path, logger);
} catch (Exception e) {
logger.error("failed to parse users file [{}]. skipping/removing all users...", e, path.toAbsolutePath());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to parse users file [{}]. skipping/removing all users...", path.toAbsolutePath()), e);
return emptyMap();
}
}
@ -110,7 +114,7 @@ public class FileUserPasswdStore {
* parses the users file. Should never return {@code null}, if the file doesn't exist an
* empty map is returned
*/
public static Map<String, char[]> parseFile(Path path, @Nullable ESLogger logger) {
public static Map<String, char[]> parseFile(Path path, @Nullable Logger logger) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}
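
Aside (illustrative only; not part of the commit): the parseFileLenient/parseFile split above, repeated for the users_roles and role-mapping files below, follows one pattern: the strict parser throws, and the lenient wrapper logs the failure and falls back to an empty map so a malformed file disables all entries rather than failing hard. A rough sketch of that shape with hypothetical names, assuming only the Log4j 2 API already used in this diff:

// Hypothetical sketch of the lenient-parsing wrapper pattern; not code from the commit.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

final class LenientFileParserSketch {

    // Strict parser: any problem propagates as an exception.
    static Map<String, String> parseFile(Path path) throws IOException {
        final Map<String, String> entries = new HashMap<>();
        for (String line : Files.readAllLines(path)) {
            final String[] parts = line.split(":", 2);
            if (parts.length != 2) {
                throw new IOException("malformed line: " + line);
            }
            entries.put(parts[0], parts[1]);
        }
        return entries;
    }

    // Lenient wrapper: log the failure and skip every entry instead of failing the caller.
    static Map<String, String> parseFileLenient(Path path, Logger logger) {
        try {
            return parseFile(path);
        } catch (Exception e) {
            logger.error(
                    (Supplier<?>) () -> new ParameterizedMessage(
                            "failed to parse file [{}], skipping all entries", path.toAbsolutePath()), e);
            return Collections.emptyMap();
        }
    }
}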

View File

@ -5,19 +5,21 @@
*/
package org.elasticsearch.xpack.security.authc.file;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
import org.elasticsearch.xpack.security.support.NoOpLogger;
import org.elasticsearch.xpack.security.support.Validation;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
import org.elasticsearch.xpack.security.support.NoOpLogger;
import org.elasticsearch.xpack.security.support.Validation;
import java.io.IOException;
import java.io.PrintWriter;
@ -40,7 +42,7 @@ public class FileUserRolesStore {
private static final Pattern USERS_DELIM = Pattern.compile("\\s*,\\s*");
private final ESLogger logger;
private final Logger logger;
private final Path file;
private CopyOnWriteArrayList<RefreshListener> listeners;
@ -92,11 +94,14 @@ public class FileUserRolesStore {
* Internally in this class, we try to load the file, but if for some reason we can't, we're being more lenient by
* logging the error and skipping all entries. This is aligned with how we handle other auto-loaded files in security.

*/
static Map<String, String[]> parseFileLenient(Path path, ESLogger logger) {
static Map<String, String[]> parseFileLenient(Path path, Logger logger) {
try {
return parseFile(path, logger);
} catch (Exception e) {
logger.error("failed to parse users_roles file [{}]. skipping/removing all entries...", e, path.toAbsolutePath());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage("failed to parse users_roles file [{}]. skipping/removing all entries...",
path.toAbsolutePath()),
e);
return emptyMap();
}
}
@ -106,7 +111,7 @@ public class FileUserRolesStore {
* an empty map is returned. The read file holds a mapping per line of the form "role -&gt; users" while the returned
* map holds entries of the form "user -&gt; roles".
*/
public static Map<String, String[]> parseFile(Path path, @Nullable ESLogger logger) {
public static Map<String, String[]> parseFile(Path path, @Nullable Logger logger) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}

View File

@ -7,6 +7,8 @@ package org.elasticsearch.xpack.security.authc.ldap;
import com.unboundid.ldap.sdk.LDAPConnection;
import com.unboundid.ldap.sdk.LDAPException;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession;
@ -64,7 +66,8 @@ public class LdapSessionFactory extends SessionFactory {
} catch (LDAPException e) {
// we catch the ldapException here since we expect it can happen and we shouldn't be logging this all the time otherwise
// it is just noise
logger.debug("failed LDAP authentication with user template [{}] and DN [{}]", e, template, dn);
logger.debug((Supplier<?>) () -> new ParameterizedMessage(
"failed LDAP authentication with user template [{}] and DN [{}]", template, dn), e);
if (lastException == null) {
lastException = e;
} else {

View File

@ -15,7 +15,7 @@ import com.unboundid.ldap.sdk.SearchRequest;
import com.unboundid.ldap.sdk.SearchResultEntry;
import com.unboundid.ldap.sdk.ServerSet;
import com.unboundid.ldap.sdk.SimpleBindRequest;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.RealmConfig;
@ -67,7 +67,7 @@ class LdapUserSearchSessionFactory extends SessionFactory {
}
}
static LDAPConnectionPool createConnectionPool(RealmConfig config, ServerSet serverSet, TimeValue timeout, ESLogger logger)
static LDAPConnectionPool createConnectionPool(RealmConfig config, ServerSet serverSet, TimeValue timeout, Logger logger)
throws LDAPException {
Settings settings = config.settings();
SimpleBindRequest bindRequest = bindRequest(settings);

View File

@ -12,7 +12,7 @@ import com.unboundid.ldap.sdk.SearchRequest;
import com.unboundid.ldap.sdk.SearchResult;
import com.unboundid.ldap.sdk.SearchResultEntry;
import com.unboundid.ldap.sdk.SearchScope;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope;
@ -54,7 +54,7 @@ class SearchGroupsResolver implements GroupsResolver {
}
@Override
public List<String> resolve(LDAPInterface connection, String userDn, TimeValue timeout, ESLogger logger,
public List<String> resolve(LDAPInterface connection, String userDn, TimeValue timeout, Logger logger,
Collection<Attribute> attributes) throws LDAPException {
String userId = getUserId(userDn, attributes, connection, timeout, logger);
if (userId == null) {
@ -81,7 +81,7 @@ class SearchGroupsResolver implements GroupsResolver {
}
private String getUserId(String dn, Collection<Attribute> attributes, LDAPInterface connection, TimeValue
timeout, ESLogger logger) throws LDAPException {
timeout, Logger logger) throws LDAPException {
if (userAttribute == null) {
return dn;
}
@ -97,7 +97,7 @@ class SearchGroupsResolver implements GroupsResolver {
return readUserAttribute(connection, dn, timeout, logger);
}
String readUserAttribute(LDAPInterface connection, String userDn, TimeValue timeout, ESLogger logger) throws LDAPException {
String readUserAttribute(LDAPInterface connection, String userDn, TimeValue timeout, Logger logger) throws LDAPException {
SearchRequest request = new SearchRequest(userDn, SearchScope.BASE, OBJECT_CLASS_PRESENCE_FILTER, userAttribute);
request.setTimeLimitSeconds(Math.toIntExact(timeout.seconds()));
SearchResultEntry results = searchForEntry(connection, request, logger);

View File

@ -11,7 +11,7 @@ import com.unboundid.ldap.sdk.LDAPInterface;
import com.unboundid.ldap.sdk.SearchRequest;
import com.unboundid.ldap.sdk.SearchResultEntry;
import com.unboundid.ldap.sdk.SearchScope;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsResolver;
@ -41,7 +41,7 @@ class UserAttributeGroupsResolver implements GroupsResolver {
}
@Override
public List<String> resolve(LDAPInterface connection, String userDn, TimeValue timeout, ESLogger logger,
public List<String> resolve(LDAPInterface connection, String userDn, TimeValue timeout, Logger logger,
Collection<Attribute> attributes) throws LDAPException {
if (attributes != null) {
for (Attribute attribute : attributes) {

View File

@ -5,17 +5,19 @@
*/
package org.elasticsearch.xpack.security.authc.ldap.support;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.unboundid.ldap.sdk.LDAPException;
import org.elasticsearch.xpack.security.user.User;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.user.User;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Supporting class for LDAP realms
@ -75,7 +77,7 @@ public abstract class AbstractLdapRealm extends CachingUsernamePasswordRealm {
private void logException(String action, Exception e, String principal) {
if (logger.isDebugEnabled()) {
logger.debug("{} failed for user [{}]", e, action, principal);
logger.debug((Supplier<?>) () -> new ParameterizedMessage("{} failed for user [{}]", action, principal), e);
} else {
String causeMessage = (e.getCause() == null) ? null : e.getCause().getMessage();
if (causeMessage == null) {

View File

@ -9,7 +9,7 @@ import com.unboundid.ldap.sdk.Attribute;
import com.unboundid.ldap.sdk.LDAPConnection;
import com.unboundid.ldap.sdk.LDAPException;
import com.unboundid.ldap.sdk.LDAPInterface;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.unit.TimeValue;
import java.io.Closeable;
@ -21,7 +21,7 @@ import java.util.List;
*/
public class LdapSession implements Closeable {
protected final ESLogger logger;
protected final Logger logger;
protected final LDAPInterface ldap;
protected final String userDn;
protected final GroupsResolver groupsResolver;
@ -36,7 +36,7 @@ public class LdapSession implements Closeable {
* outside of and be reused across all connections. We can't keep a static logger in this class
* since we want the logger to be contextual (i.e. aware of the settings and its environment).
*/
public LdapSession(ESLogger logger, LDAPInterface connection, String userDn, GroupsResolver groupsResolver, TimeValue timeout,
public LdapSession(Logger logger, LDAPInterface connection, String userDn, GroupsResolver groupsResolver, TimeValue timeout,
Collection<Attribute> attributes) {
this.logger = logger;
this.ldap = connection;
@ -73,7 +73,7 @@ public class LdapSession implements Closeable {
public interface GroupsResolver {
List<String> resolve(LDAPInterface ldapConnection, String userDn, TimeValue timeout, ESLogger logger,
List<String> resolve(LDAPInterface ldapConnection, String userDn, TimeValue timeout, Logger logger,
Collection<Attribute> attributes) throws LDAPException;
String[] attributes();
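
Aside (illustrative only; not part of the commit): the LdapSession constructor above takes the Logger as an argument for the reason its javadoc gives: the logger must be contextual (aware of node settings and environment), so the realm creates it once and hands it to each session instead of the class keeping a static logger. A minimal sketch of that injection style, with hypothetical names:

// Hypothetical sketch of passing a contextual Logger into a helper instead of using a static one.
import org.apache.logging.log4j.Logger;

final class SessionSketch {
    private final Logger logger;
    private final String userDn;

    // The owning factory creates the logger once (with whatever node/realm context it needs)
    // and reuses it across every session it builds.
    SessionSketch(Logger logger, String userDn) {
        this.logger = logger;
        this.userDn = userDn;
    }

    void close() {
        logger.debug("closing session for [{}]", userDn);
    }
}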

View File

@ -14,7 +14,9 @@ import com.unboundid.ldap.sdk.ResultCode;
import com.unboundid.ldap.sdk.SearchRequest;
import com.unboundid.ldap.sdk.SearchResult;
import com.unboundid.ldap.sdk.SearchResultEntry;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import javax.naming.ldap.Rdn;
import java.text.MessageFormat;
@ -48,15 +50,18 @@ public final class LdapUtils {
* This method performs a LDAPConnection.search(...) operation while handling referral exceptions. This is necessary
* to maintain backwards compatibility with the original JNDI implementation
*/
public static SearchResult search(LDAPInterface ldap, SearchRequest searchRequest, ESLogger logger) throws LDAPException {
public static SearchResult search(LDAPInterface ldap, SearchRequest searchRequest, Logger logger) throws LDAPException {
SearchResult results;
try {
results = ldap.search(searchRequest);
} catch (LDAPSearchException e) {
if (e.getResultCode().equals(ResultCode.REFERRAL) && e.getSearchResult() != null) {
if (logger.isDebugEnabled()) {
logger.debug("a referral could not be followed for request [{}] so some results may not have been retrieved", e,
searchRequest);
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"a referral could not be followed for request [{}] so some results may not have been retrieved",
searchRequest),
e);
}
results = e.getSearchResult();
} else {
@ -70,15 +75,18 @@ public final class LdapUtils {
* This method performs a LDAPConnection.searchForEntry(...) operation while handling referral exceptions. This is necessary
* to maintain backwards compatibility with the original JNDI implementation
*/
public static SearchResultEntry searchForEntry(LDAPInterface ldap, SearchRequest searchRequest, ESLogger logger) throws LDAPException {
public static SearchResultEntry searchForEntry(LDAPInterface ldap, SearchRequest searchRequest, Logger logger) throws LDAPException {
SearchResultEntry entry;
try {
entry = ldap.searchForEntry(searchRequest);
} catch (LDAPSearchException e) {
if (e.getResultCode().equals(ResultCode.REFERRAL) && e.getSearchResult() != null && e.getSearchResult().getEntryCount() > 0) {
if (logger.isDebugEnabled()) {
logger.debug("a referral could not be followed for request [{}] so some results may not have been retrieved", e,
searchRequest);
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"a referral could not be followed for request [{}] so some results may not have been retrieved",
searchRequest),
e);
}
entry = e.getSearchResult().getSearchEntries().get(0);
} else {

View File

@ -10,8 +10,8 @@ import com.unboundid.ldap.sdk.LDAPException;
import com.unboundid.ldap.sdk.LDAPURL;
import com.unboundid.ldap.sdk.ServerSet;
import com.unboundid.util.ssl.HostNameSSLSocketVerifier;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.RealmConfig;
@ -47,7 +47,7 @@ public abstract class SessionFactory {
private static final Pattern STARTS_WITH_LDAPS = Pattern.compile("^ldaps:.*", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_LDAP = Pattern.compile("^ldap:.*", Pattern.CASE_INSENSITIVE);
protected final ESLogger logger;
protected final Logger logger;
protected final RealmConfig config;
protected final TimeValue timeout;
protected final SSLService sslService;

View File

@ -5,14 +5,16 @@
*/
package org.elasticsearch.xpack.security.authc.pki;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.user.User;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.authc.Realm;
import org.elasticsearch.xpack.security.authc.RealmConfig;
@ -20,8 +22,7 @@ import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;
import org.elasticsearch.xpack.security.transport.SSLClientAuth;
import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3HttpServerTransport;
import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3Transport;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.user.User;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
@ -98,7 +99,7 @@ public class PkiRealm extends Realm {
return false;
}
static X509AuthenticationToken token(Object pkiHeaderValue, Pattern principalPattern, ESLogger logger) {
static X509AuthenticationToken token(Object pkiHeaderValue, Pattern principalPattern, Logger logger) {
if (pkiHeaderValue == null) {
return null;
}
@ -128,7 +129,7 @@ public class PkiRealm extends Realm {
return new X509AuthenticationToken(certificates, principal, dn);
}
static boolean isCertificateChainTrusted(X509TrustManager[] trustManagers, X509AuthenticationToken token, ESLogger logger) {
static boolean isCertificateChainTrusted(X509TrustManager[] trustManagers, X509AuthenticationToken token, Logger logger) {
if (trustManagers.length > 0) {
boolean trusted = false;
for (X509TrustManager trustManager : trustManagers) {
@ -138,7 +139,9 @@ public class PkiRealm extends Realm {
break;
} catch (CertificateException e) {
if (logger.isTraceEnabled()) {
logger.trace("failed certificate validation for principal [{}]", e, token.principal());
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"failed certificate validation for principal [{}]", token.principal()), e);
} else if (logger.isDebugEnabled()) {
logger.debug("failed certificate validation for principal [{}]", token.principal());
}
@ -200,7 +203,7 @@ public class PkiRealm extends Realm {
* @param config this realm's configuration
* @param logger the logger to use if there is a configuration issue
*/
static void checkSSLEnabled(RealmConfig config, ESLogger logger) {
static void checkSSLEnabled(RealmConfig config, Logger logger) {
Settings settings = config.globalSettings();
final boolean httpSsl = SecurityNetty3HttpServerTransport.SSL_SETTING.get(settings);
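
Aside (illustrative only; not part of the commit): isCertificateChainTrusted in the PkiRealm hunks above loops over the configured trust managers and treats a CertificateException from any one of them as "not trusted by this manager, try the next". A rough standalone sketch of that loop; the names and the authType derivation are assumptions for illustration:

// Illustrative sketch of checking a certificate chain against several trust managers; not code from the commit.
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import javax.net.ssl.X509TrustManager;
import org.apache.logging.log4j.Logger;

final class TrustCheckSketch {

    static boolean isChainTrusted(X509TrustManager[] trustManagers, X509Certificate[] chain, Logger logger) {
        for (X509TrustManager trustManager : trustManagers) {
            try {
                // Throws CertificateException if this trust manager does not trust the chain.
                trustManager.checkClientTrusted(chain, chain[0].getPublicKey().getAlgorithm());
                return true;
            } catch (CertificateException e) {
                logger.trace("certificate chain not trusted by this trust manager, trying the next one", e);
            }
        }
        return false;
    }
}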

View File

@ -5,12 +5,13 @@
*/
package org.elasticsearch.xpack.security.authc.support;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.cache.CacheLoader;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.authc.Authentication;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.support.Exceptions;
@ -130,7 +131,9 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
}
if (logger.isTraceEnabled()) {
logger.trace("realm [{}] could not authenticate [{}]", ee, type(), token.principal());
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"realm [{}] could not authenticate [{}]", type(), token.principal()), ee);
} else if (logger.isDebugEnabled()) {
logger.debug("realm [{}] could not authenticate [{}]", type(), token.principal());
}
@ -160,7 +163,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
return userWithHash.user;
} catch (ExecutionException ee) {
if (logger.isTraceEnabled()) {
logger.trace("realm [{}] could not lookup [{}]", ee, name(), username);
logger.trace((Supplier<?>) () -> new ParameterizedMessage("realm [{}] could not lookup [{}]", name(), username), ee);
} else if (logger.isDebugEnabled()) {
logger.debug("realm [{}] could not authenticate [{}]", name(), username);
}

View File

@ -7,16 +7,18 @@ package org.elasticsearch.xpack.security.authc.support;
import com.unboundid.ldap.sdk.DN;
import com.unboundid.ldap.sdk.LDAPException;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import java.io.IOException;
import java.io.InputStream;
@ -44,7 +46,7 @@ public class DnRoleMapper {
public static final String ROLE_MAPPING_FILE_SETTING = "files.role_mapping";
public static final String USE_UNMAPPED_GROUPS_AS_ROLES_SETTING = "unmapped_groups_as_roles";
protected final ESLogger logger;
protected final Logger logger;
protected final RealmConfig config;
private final String realmType;
@ -89,16 +91,18 @@ public class DnRoleMapper {
* logging the error and skipping/removing all mappings. This is aligned with how we handle other auto-loaded files
* in security.
*/
public static Map<DN, Set<String>> parseFileLenient(Path path, ESLogger logger, String realmType, String realmName) {
public static Map<DN, Set<String>> parseFileLenient(Path path, Logger logger, String realmType, String realmName) {
try {
return parseFile(path, logger, realmType, realmName);
} catch (Exception e) {
logger.error("failed to parse role mappings file [{}]. skipping/removing all mappings...", e, path.toAbsolutePath());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to parse role mappings file [{}]. skipping/removing all mappings...", path.toAbsolutePath()), e);
return emptyMap();
}
}
public static Map<DN, Set<String>> parseFile(Path path, ESLogger logger, String realmType, String realmName) {
public static Map<DN, Set<String>> parseFile(Path path, Logger logger, String realmType, String realmName) {
logger.trace("reading realm [{}/{}] role mappings file [{}]...", realmType, realmName, path.toAbsolutePath());
@ -124,8 +128,15 @@ public class DnRoleMapper {
}
dnRoles.add(role);
} catch (LDAPException e) {
logger.error("invalid DN [{}] found in [{}] role mappings [{}] for realm [{}/{}]. skipping... ", e, providedDn,
realmType, path.toAbsolutePath(), realmType, realmName);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"invalid DN [{}] found in [{}] role mappings [{}] for realm [{}/{}]. skipping... ",
providedDn,
realmType,
path.toAbsolutePath(),
realmType,
realmName),
e);
}
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.security.authz.accesscontrol;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BooleanQuery;
@ -33,7 +34,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.client.FilterClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@ -104,7 +104,7 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
private final BitsetFilterCache bitsetFilterCache;
private final XPackLicenseState licenseState;
private final ThreadContext threadContext;
private final ESLogger logger;
private final Logger logger;
private final ScriptService scriptService;
public SecurityIndexSearcherWrapper(IndexSettings indexSettings, QueryShardContext queryShardContext,

View File

@ -5,22 +5,13 @@
*/
package org.elasticsearch.xpack.security.authz.store;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
@ -37,6 +28,17 @@ import org.elasticsearch.xpack.security.authz.permission.Role;
import org.elasticsearch.xpack.security.support.NoOpLogger;
import org.elasticsearch.xpack.security.support.Validation;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableMap;
@ -115,7 +117,7 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
return XPackPlugin.resolveConfigFile(env, "roles.yml");
}
public static Set<String> parseFileForRoleNames(Path path, ESLogger logger) {
public static Set<String> parseFileForRoleNames(Path path, Logger logger) {
Map<String, Role> roleMap = parseFile(path, logger, false, Settings.EMPTY);
if (roleMap == null) {
return emptySet();
@ -123,11 +125,11 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
return roleMap.keySet();
}
public static Map<String, Role> parseFile(Path path, ESLogger logger, Settings settings) {
public static Map<String, Role> parseFile(Path path, Logger logger, Settings settings) {
return parseFile(path, logger, true, settings);
}
public static Map<String, Role> parseFile(Path path, ESLogger logger, boolean resolvePermission, Settings settings) {
public static Map<String, Role> parseFile(Path path, Logger logger, boolean resolvePermission, Settings settings) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}
@ -150,7 +152,11 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
}
} catch (IOException ioe) {
logger.error("failed to read roles file [{}]. skipping all roles...", ioe, path.toAbsolutePath());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to read roles file [{}]. skipping all roles...",
path.toAbsolutePath()),
ioe);
return emptyMap();
}
} else {
@ -162,7 +168,7 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
return unmodifiableMap(roles);
}
public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, ESLogger logger,
public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, Logger logger,
boolean resolvePermission, Settings settings) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
@ -180,7 +186,11 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
}
}
} catch (IOException ioe) {
logger.error("failed to read roles file [{}]. skipping all roles...", ioe, path.toAbsolutePath());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to read roles file [{}]. skipping all roles...",
path.toAbsolutePath()),
ioe);
return emptyMap();
}
}
@ -188,7 +198,7 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
}
@Nullable
private static Role parseRole(String segment, Path path, ESLogger logger, boolean resolvePermissions, Settings settings) {
private static Role parseRole(String segment, Path path, Logger logger, boolean resolvePermissions, Settings settings) {
RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermissions, settings);
if (descriptor != null) {
@ -210,7 +220,7 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
}
@Nullable
private static RoleDescriptor parseRoleDescriptor(String segment, Path path, ESLogger logger,
private static RoleDescriptor parseRoleDescriptor(String segment, Path path, Logger logger,
boolean resolvePermissions, Settings settings) {
String roleName = null;
try {
@ -245,15 +255,26 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
} catch (ElasticsearchParseException e) {
assert roleName != null;
if (logger.isDebugEnabled()) {
logger.debug("parsing exception for role [{}]", e, roleName);
final String finalRoleName = roleName;
logger.debug((Supplier<?>) () -> new ParameterizedMessage("parsing exception for role [{}]", finalRoleName), e);
} else {
logger.error(e.getMessage() + ". skipping role...");
}
} catch (IOException e) {
if (roleName != null) {
logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", e, roleName, path);
final String finalRoleName = roleName;
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"invalid role definition [{}] in roles file [{}]. skipping role...",
finalRoleName,
path),
e);
} else {
logger.error("invalid role definition in roles file [{}]. skipping role...", e, path);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"invalid role definition in roles file [{}]. skipping role...",
path),
e);
}
}
return null;
@ -301,7 +322,9 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
permissions = parseFile(file, logger, settings);
logger.info("updated roles (roles file [{}] changed)", file.toAbsolutePath());
} catch (Exception e) {
logger.error("could not reload roles file [{}]. Current roles remain unmodified", e, file.toAbsolutePath());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"could not reload roles file [{}]. Current roles remain unmodified", file.toAbsolutePath()), e);
return;
}
listener.onRefresh();
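
Aside (illustrative only; not part of the commit): one detail worth noting in the FileRolesStore hunks above is the final String finalRoleName = roleName copy. roleName is reassigned inside the try block, so it is not effectively final and cannot be captured by the message lambda directly; the change copies it into a final local first. A small standalone illustration of why that copy is needed, with hypothetical names:

// Standalone illustration of the effectively-final copy used above; not code from the commit.
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

public class LambdaCaptureSketch {
    private static final Logger logger = LogManager.getLogger(LambdaCaptureSketch.class);

    public static void main(String[] args) {
        String roleName = null; // reassigned below, so not effectively final
        try {
            roleName = "admin";
            throw new IllegalStateException("simulated parse failure");
        } catch (Exception e) {
            // Capturing roleName directly would not compile, because the lambda may only
            // capture (effectively) final locals:
            //     logger.error((Supplier<?>) () -> new ParameterizedMessage("parsing exception for role [{}]", roleName), e);
            // Copy it into a final local first, as the diff does:
            final String finalRoleName = roleName;
            logger.error((Supplier<?>) () -> new ParameterizedMessage("parsing exception for role [{}]", finalRoleName), e);
        }
    }
}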

View File

@ -5,19 +5,8 @@
*/
package org.elasticsearch.xpack.security.authz.store;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse;
@ -67,6 +56,19 @@ import org.elasticsearch.xpack.security.authz.permission.IndicesPermission.Group
import org.elasticsearch.xpack.security.authz.permission.Role;
import org.elasticsearch.xpack.security.client.SecurityClient;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.security.Security.setting;
import static org.elasticsearch.xpack.security.SecurityTemplateService.securityIndexMappingAndTemplateUpToDate;
@ -240,7 +242,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
}
});
} catch (Exception e) {
logger.error("unable to retrieve roles {}", e, Arrays.toString(names));
logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to retrieve roles {}", Arrays.toString(names)), e);
listener.onFailure(e);
}
}
@ -306,12 +308,12 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
@Override
public void onFailure(Exception e) {
logger.error("failed to put role [{}]", e, request.name());
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put role [{}]", request.name()), e);
listener.onFailure(e);
}
});
} catch (Exception e) {
logger.error("unable to put role [{}]", e, request.name());
logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to put role [{}]", request.name()), e);
listener.onFailure(e);
}
@ -419,9 +421,11 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
@Override
public void onFailure(Exception t) {
if (t instanceof IndexNotFoundException) {
logger.trace("failed to retrieve role [{}] since security index does not exist", t, roleId);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to retrieve role [{}] since security index does not exist", roleId), t);
} else {
logger.error("failed to retrieve role [{}]", t, roleId);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve role [{}]", roleId), t);
}
}
}, latch));
@ -448,9 +452,9 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
});
} catch (ExecutionException e) {
if (e.getCause() instanceof NullPointerException) {
logger.trace("role [{}] was not found", e, roleId);
logger.trace((Supplier<?>) () -> new ParameterizedMessage("role [{}] was not found", roleId), e);
} else {
logger.error("failed to load role [{}]", e, roleId);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to load role [{}]", roleId), e);
}
}
@ -462,7 +466,9 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
GetRequest request = client.prepareGet(SecurityTemplateService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, role).request();
client.get(request, listener);
} catch (IndexNotFoundException e) {
logger.trace("unable to retrieve role [{}] since security index does not exist", e, role);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"unable to retrieve role [{}] since security index does not exist", role), e);
listener.onResponse(new GetResponse(
new GetResult(SecurityTemplateService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, role, -1, false, null, null)));
} catch (Exception e) {
@ -482,7 +488,8 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
@Override
public void onFailure(Exception t) {
// Not really much to do here except for warn about it...
logger.warn("failed to clear scroll [{}] after retrieving roles", t, scrollId);
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage("failed to clear scroll [{}] after retrieving roles", scrollId), t);
}
});
}
@ -522,7 +529,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
@Override
public void onFailure(Exception e) {
logger.error("unable to clear cache for role [{}]", e, role);
logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to clear cache for role [{}]", role), e);
ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + role
+ "] failed. please clear the role cache manually", e);
listener.onFailure(exception);
@ -536,8 +543,8 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
final boolean exists = event.state().metaData().indices().get(SecurityTemplateService.SECURITY_INDEX_NAME) != null;
// make sure all the primaries are active
if (exists && event.state().routingTable().index(SecurityTemplateService.SECURITY_INDEX_NAME).allPrimaryShardsActive()) {
logger.debug("security index [{}] all primary shards started, so polling can start",
SecurityTemplateService.SECURITY_INDEX_NAME);
logger.debug(
"security index [{}] all primary shards started, so polling can start", SecurityTemplateService.SECURITY_INDEX_NAME);
securityIndexExists = true;
} else {
// always set the value - it may have changed...
@ -562,7 +569,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
try {
return RoleDescriptor.parse(name, sourceBytes);
} catch (Exception e) {
logger.error("error in the format of data for role [{}]", e, name);
logger.error((Supplier<?>) () -> new ParameterizedMessage("error in the format of data for role [{}]", name), e);
return null;
}
}

View File

@ -5,24 +5,26 @@
*/
package org.elasticsearch.xpack.security.rest;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.http.netty3.Netty3HttpRequest;
import org.elasticsearch.http.netty4.Netty4HttpRequest;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestFilter;
import org.elasticsearch.rest.RestFilterChain;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3HttpServerTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.jboss.netty.handler.ssl.SslHandler;
import javax.net.ssl.SSLEngine;
@ -36,7 +38,7 @@ import java.security.cert.X509Certificate;
public class SecurityRestFilter extends RestFilter {
private final AuthenticationService service;
private final ESLogger logger;
private final Logger logger;
private final XPackLicenseState licenseState;
private final ThreadContext threadContext;
private final boolean extractClientCertificate;
@ -76,7 +78,7 @@ public class SecurityRestFilter extends RestFilter {
filterChain.continueProcessing(request, channel, client);
}
static void putClientCertificateInContext(RestRequest request, ThreadContext threadContext, ESLogger logger) throws Exception {
static void putClientCertificateInContext(RestRequest request, ThreadContext threadContext, Logger logger) throws Exception {
assert request instanceof Netty3HttpRequest || request instanceof Netty4HttpRequest;
if (request instanceof Netty3HttpRequest) {
Netty3HttpRequest nettyHttpRequest = (Netty3HttpRequest) request;
@ -94,7 +96,7 @@ public class SecurityRestFilter extends RestFilter {
}
private static void extractClientCerts(SSLEngine sslEngine, Object channel, ThreadContext threadContext, ESLogger logger) {
private static void extractClientCerts(SSLEngine sslEngine, Object channel, ThreadContext threadContext, Logger logger) {
try {
Certificate[] certs = sslEngine.getSession().getPeerCertificates();
if (certs instanceof X509Certificate[]) {
@ -106,7 +108,8 @@ public class SecurityRestFilter extends RestFilter {
assert sslEngine.getNeedClientAuth() == false;
assert sslEngine.getWantClientAuth();
if (logger.isTraceEnabled()) {
logger.trace("SSL Peer did not present a certificate on channel [{}]", e, channel);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage("SSL Peer did not present a certificate on channel [{}]", channel), e);
} else if (logger.isDebugEnabled()) {
logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
}
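
Aside (illustrative only; not part of the commit): the extractClientCerts hunk above handles the case where TLS client authentication is requested but not required. SSLSession.getPeerCertificates() throws SSLPeerUnverifiedException when the peer presented no certificate, and the filter logs that at trace/debug instead of failing the request. A hedged sketch of the core call, with hypothetical names and no Netty wiring:

// Hypothetical sketch of reading an optional TLS client certificate; not code from the commit.
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLPeerUnverifiedException;
import org.apache.logging.log4j.Logger;

final class ClientCertSketch {

    static X509Certificate[] peerCertificates(SSLEngine sslEngine, Logger logger) {
        try {
            final Certificate[] certs = sslEngine.getSession().getPeerCertificates();
            if (certs instanceof X509Certificate[]) {
                return (X509Certificate[]) certs;
            }
        } catch (SSLPeerUnverifiedException e) {
            // Client authentication is wanted rather than needed, so a missing certificate
            // is expected and only logged.
            logger.debug("SSL peer did not present a certificate", e);
        }
        return null;
    }
}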

View File

@ -5,13 +5,9 @@
*/
package org.elasticsearch.xpack.security.transport;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLPeerUnverifiedException;
import java.io.IOException;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.transport.DelegatingTransportChannel;
@ -26,6 +22,12 @@ import org.elasticsearch.xpack.security.authz.AuthorizationService;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.handler.ssl.SslHandler;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLPeerUnverifiedException;
import java.io.IOException;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import static org.elasticsearch.xpack.security.support.Exceptions.authenticationError;
/**
@ -48,7 +50,7 @@ public interface ServerTransportFilter {
* request is properly authenticated and authorized
*/
class NodeProfile implements ServerTransportFilter {
private static final ESLogger logger = Loggers.getLogger(NodeProfile.class);
private static final Logger logger = Loggers.getLogger(NodeProfile.class);
private final AuthenticationService authcService;
private final AuthorizationService authzService;
@ -113,7 +115,9 @@ public interface ServerTransportFilter {
assert sslEngine.getNeedClientAuth() == false;
assert sslEngine.getWantClientAuth();
if (logger.isTraceEnabled()) {
logger.trace("SSL Peer did not present a certificate on channel [{}]", e, channel);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"SSL Peer did not present a certificate on channel [{}]", channel), e);
} else if (logger.isDebugEnabled()) {
logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
}

View File

@ -6,10 +6,10 @@
package org.elasticsearch.xpack.security.transport.filter;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
@ -96,7 +96,7 @@ public class IPFilter {
private final XPackLicenseState licenseState;
private final boolean alwaysAllowBoundAddresses;
private final ESLogger logger;
private final Logger logger;
private volatile Map<String, SecurityIpFilterRule[]> rules = Collections.emptyMap();
private volatile boolean isIpFilterEnabled;
private volatile boolean isHttpFilterEnabled;

View File

@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.security.transport.netty3;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
@ -31,7 +33,7 @@ import java.util.Queue;
*/
public class Netty3HandshakeWaitingHandler extends SimpleChannelHandler {
private final ESLogger logger;
private final Logger logger;
private boolean handshaken = false;
private Queue<MessageEvent> pendingWrites = new LinkedList<>();
@ -39,7 +41,7 @@ public class Netty3HandshakeWaitingHandler extends SimpleChannelHandler {
/**
* @param logger We pass a context aware logger here (logger that is aware of the node name &amp; env)
*/
public Netty3HandshakeWaitingHandler(ESLogger logger) {
public Netty3HandshakeWaitingHandler(Logger logger) {
this.logger = logger;
}
@ -69,7 +71,9 @@ public class Netty3HandshakeWaitingHandler extends SimpleChannelHandler {
} else {
Throwable cause = handshakeFuture.getCause();
if (logger.isDebugEnabled()) {
logger.debug("SSL/TLS handshake failed, closing channel: {}", cause, cause.getMessage());
logger.debug(
(Supplier<?>) () -> new ParameterizedMessage(
"SSL/TLS handshake failed, closing channel: {}", cause.getMessage()), cause);
} else {
logger.error("SSL/TLS handshake failed, closing channel: {}", cause.getMessage());
}

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.security.transport.netty3;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
@ -12,10 +14,10 @@ import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.http.netty3.Netty3HttpServerTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.security.ssl.SSLService;
import org.elasticsearch.xpack.security.transport.SSLClientAuth;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.threadpool.ThreadPool;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
@ -23,7 +25,6 @@ import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.handler.ssl.SslHandler;
import javax.net.ssl.SSLEngine;
import java.util.List;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
@ -74,14 +75,18 @@ public class SecurityNetty3HttpServerTransport extends Netty3HttpServerTransport
Throwable t = e.getCause();
if (isNotSslRecordException(t)) {
if (logger.isTraceEnabled()) {
logger.trace("received plaintext http traffic on a https channel, closing connection {}", t, ctx.getChannel());
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"received plaintext http traffic on a https channel, closing connection {}",
ctx.getChannel()),
t);
} else {
logger.warn("received plaintext http traffic on a https channel, closing connection {}", ctx.getChannel());
}
ctx.getChannel().close();
} else if (isCloseDuringHandshakeException(t)) {
if (logger.isTraceEnabled()) {
logger.trace("connection {} closed during handshake", t, ctx.getChannel());
logger.trace((Supplier<?>) () -> new ParameterizedMessage("connection {} closed during handshake", ctx.getChannel()), t);
} else {
logger.warn("connection {} closed during handshake", ctx.getChannel());
}

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.security.transport.netty3;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.internal.Nullable;
@ -15,11 +17,11 @@ import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.netty3.Netty3Transport;
import org.elasticsearch.xpack.security.ssl.SSLService;
import org.elasticsearch.xpack.security.transport.SSLClientAuth;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.netty3.Netty3Transport;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
@ -126,14 +128,16 @@ public class SecurityNetty3Transport extends Netty3Transport {
protected void onException(Channel channel, Exception e) throws IOException {
if (isNotSslRecordException(e)) {
if (logger.isTraceEnabled()) {
logger.trace("received plaintext traffic on an encrypted channel, closing connection {}", e, channel);
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"received plaintext traffic on an encrypted channel, closing connection {}", channel), e);
} else {
logger.warn("received plaintext traffic on an encrypted channel, closing connection {}", channel);
}
disconnectFromNodeChannel(channel, e);
} else if (isCloseDuringHandshakeException(e)) {
if (logger.isTraceEnabled()) {
logger.trace("connection {} closed during handshake", e, channel);
logger.trace((Supplier<?>) () -> new ParameterizedMessage("connection {} closed during handshake", channel), e);
} else {
logger.warn("connection {} closed during handshake", channel);
}

View File

@ -9,6 +9,8 @@ import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.ssl.SslHandler;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
@ -22,7 +24,6 @@ import org.elasticsearch.xpack.security.transport.SSLClientAuth;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import javax.net.ssl.SSLEngine;
import java.util.List;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
@ -69,14 +70,18 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
if (isNotSslRecordException(cause)) {
if (logger.isTraceEnabled()) {
logger.trace("received plaintext http traffic on a https channel, closing connection {}", cause, ctx.channel());
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"received plaintext http traffic on a https channel, closing connection {}",
ctx.channel()),
cause);
} else {
logger.warn("received plaintext http traffic on a https channel, closing connection {}", ctx.channel());
}
ctx.channel().close();
} else if (isCloseDuringHandshakeException(cause)) {
if (logger.isTraceEnabled()) {
logger.trace("connection {} closed during handshake", cause, ctx.channel());
logger.trace((Supplier<?>) () -> new ParameterizedMessage("connection {} closed during handshake", ctx.channel()), cause);
} else {
logger.warn("connection {} closed during handshake", ctx.channel());
}

View File

@ -12,12 +12,12 @@ import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.xpack.security.authc.activedirectory.ActiveDirectoryRealm;
import org.elasticsearch.xpack.security.authc.ldap.LdapRealm;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.transport.netty3.SecurityNetty3Transport;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -31,7 +31,6 @@ import static org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScop
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER;
import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
/**

View File

@ -5,156 +5,112 @@
*/
package org.elasticsearch.xpack.security.audit.logfile;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.apache.logging.log4j.core.impl.MutableLogEvent;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.logging.TestLoggers;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
public class CapturingLogger {
/**
*
*/
public class CapturingLogger extends ESLogger {
private Level level;
public final List<Msg> error = new ArrayList<>();
public final List<Msg> warn = new ArrayList<>();
public final List<Msg> info = new ArrayList<>();
public final List<Msg> debug = new ArrayList<>();
public final List<Msg> trace = new ArrayList<>();
public CapturingLogger(Level level) {
super(null, null);
this.level = level;
public static Logger newCapturingLogger(final Level level) throws IllegalAccessException {
final StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
final String name = caller.getClassName() + "." + caller.getMethodName() + "." + level.toString();
final Logger logger = ESLoggerFactory.getLogger(name);
Loggers.setLevel(logger, level);
TestLoggers.addAppender(logger, new MockAppender(name));
return logger;
}
@Override
public void trace(String msg, Throwable cause, Object... params) {
if (isTraceEnabled()) {
add(trace, format(msg, params), cause);
private static MockAppender getMockAppender(final String name) {
final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
final Configuration config = ctx.getConfiguration();
final LoggerConfig loggerConfig = config.getLoggerConfig(name);
return (MockAppender) loggerConfig.getAppenders().get(name);
}
public static boolean isEmpty(final String name) {
final MockAppender appender = getMockAppender(name);
return appender.isEmpty();
}
public static List<String> output(final String name, final Level level) {
final MockAppender appender = getMockAppender(name);
return appender.output(level);
}
private static class MockAppender extends AbstractAppender {
public final List<String> error = new ArrayList<>();
public final List<String> warn = new ArrayList<>();
public final List<String> info = new ArrayList<>();
public final List<String> debug = new ArrayList<>();
public final List<String> trace = new ArrayList<>();
private MockAppender(final String name) throws IllegalAccessException {
super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], true, null, null), null);
}
@Override
public void append(LogEvent event) {
switch (event.getLevel().toString()) {
// we can not keep a reference to the event here because Log4j is using a thread
// local instance under the hood
case "ERROR":
error.add(event.getMessage().getFormattedMessage());
break;
case "WARN":
warn.add(event.getMessage().getFormattedMessage());
break;
case "INFO":
info.add(event.getMessage().getFormattedMessage());
break;
case "DEBUG":
debug.add(event.getMessage().getFormattedMessage());
break;
case "TRACE":
trace.add(event.getMessage().getFormattedMessage());
break;
default:
throw invalidLevelException(event.getLevel());
}
}
private IllegalArgumentException invalidLevelException(Level level) {
return new IllegalArgumentException("invalid level, expected [ERROR|WARN|INFO|DEBUG|TRACE] but was [" + level + "]");
}
public boolean isEmpty() {
return error.isEmpty() && warn.isEmpty() && info.isEmpty() && debug.isEmpty() && trace.isEmpty();
}
public List<String> output(Level level) {
switch (level.toString()) {
case "ERROR":
return error;
case "WARN":
return warn;
case "INFO":
return info;
case "DEBUG":
return debug;
case "TRACE":
return trace;
default:
throw invalidLevelException(level);
}
}
}
@Override
public void debug(String msg, Throwable cause, Object... params) {
if (isDebugEnabled()) {
add(debug, format(msg, params), cause);
}
}
@Override
public void info(String msg, Throwable cause, Object... params) {
if (isInfoEnabled()) {
add(info, format(msg, params), cause);
}
}
@Override
public void warn(String msg, Throwable cause, Object... params) {
if (isWarnEnabled()) {
add(warn, format(msg, params), cause);
}
}
@Override
public void error(String msg, Throwable cause, Object... params) {
if (isErrorEnabled()) {
add(error, format(msg, params), cause);
}
}
@Override
public String getName() {
return "capturing";
}
@Override
public void setLevel(String level) {
this.level = Level.resolve(level);
}
@Override
public String getLevel() {
return level.name().toLowerCase(Locale.ROOT);
}
public Level level() {
return level;
}
@Override
public boolean isTraceEnabled() {
return level.enabled(Level.TRACE);
}
@Override
public boolean isDebugEnabled() {
return level.enabled(Level.DEBUG);
}
@Override
public boolean isInfoEnabled() {
return level.enabled(Level.INFO);
}
@Override
public boolean isWarnEnabled() {
return level.enabled(Level.WARN);
}
@Override
public boolean isErrorEnabled() {
return level.enabled(Level.ERROR);
}
public List<Msg> output(Level level) {
switch (level) {
case ERROR: return error;
case WARN: return warn;
case INFO: return info;
case DEBUG: return debug;
case TRACE: return trace;
default:
return null; // can never happen
}
}
private static void add(List<Msg> list, String text, Throwable t) {
list.add(new Msg(text, t));
}
public boolean isEmpty() {
return error.isEmpty() && warn.isEmpty() && info.isEmpty() && debug.isEmpty() && trace.isEmpty();
}
public static class Msg {
public String text;
public Throwable t;
public Msg(String text, Throwable t) {
this.text = text;
this.t = t;
}
}
public enum Level {
ERROR(0), WARN(1), INFO(2), DEBUG(3), TRACE(4);
private final int value;
private Level(int value) {
this.value = value;
}
public boolean enabled(Level other) {
return value >= other.value;
}
private static Level resolve(String level) {
return Level.valueOf(level.toUpperCase(Locale.ROOT));
}
}
}
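A rough usage sketch for the rewritten helper above, assuming it compiles as shown: callers now receive a real Log4j 2 Logger wired to the MockAppender and assert on captured messages statically, by logger name and level, instead of reading the old Msg lists. The logged message and the assertions below are illustrative only.

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;

import java.util.List;

public class CapturingLoggerUsageSketch {
    public static void main(String[] args) throws IllegalAccessException {
        // The logger name is derived from the calling class, method, and level.
        Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
        logger.info("parsed [{}] users", 0);

        // Captured output is keyed by logger name and level.
        List<String> events = CapturingLogger.output(logger.getName(), Level.INFO);
        assert events.size() == 1;
        assert "parsed [0] users".equals(events.get(0));
        assert CapturingLogger.isEmpty(logger.getName()) == false;
    }
}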

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.security.audit.logfile;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.node.DiscoveryNode;
@ -22,7 +24,6 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.audit.AuditUtil;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger.Level;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.security.rest.RemoteHostHeader;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
@ -43,7 +44,8 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class LoggingAuditTrailTests extends ESTestCase {
private static enum RestContent {
private enum RestContent {
VALID() {
@Override
protected boolean hasContent() {
@ -121,17 +123,17 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAnonymousAccessDeniedTransport() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, clusterService.localNode(), threadContext);
auditTrail.anonymousAccessDenied("_action", message);
switch (level) {
case ERROR:
switch (level.toString()) {
case "ERROR":
assertEmptyLog(logger);
break;
case WARN:
case INFO:
case "WARN":
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.WARN, prefix + "[transport] [anonymous_access_denied]\t" + origins +
", action=[_action], indices=[" + indices(message) + "]");
@ -139,8 +141,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
assertMsg(logger, Level.WARN, prefix + "[transport] [anonymous_access_denied]\t" + origins + ", action=[_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [anonymous_access_denied]\t" + origins +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
@ -161,20 +163,20 @@ public class LoggingAuditTrailTests extends ESTestCase {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.anonymousAccessDenied(request);
switch (level) {
case ERROR:
switch (level.toString()) {
case "ERROR":
assertEmptyLog(logger);
break;
case WARN:
case INFO:
case "WARN":
case "INFO":
assertMsg(logger, Level.WARN, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri]");
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
assertMsg(logger, Level.DEBUG, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]");
}
@ -184,15 +186,15 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAuthenticationFailed() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
auditTrail.authenticationFailed(new MockToken(), "_action", message);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.ERROR, prefix + "[transport] [authentication_failed]\t" + origins +
", principal=[_principal], action=[_action], indices=[" + indices(message) + "]");
@ -201,8 +203,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
", principal=[_principal], action=[_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [authentication_failed]\t" + origins +
", principal=[_principal], action=[_action], indices=[" + indices(message) +
@ -218,15 +220,15 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAuthenticationFailedNoToken() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
auditTrail.authenticationFailed("_action", message);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.ERROR, prefix + "[transport] [authentication_failed]\t" + origins +
", action=[_action], indices=[" + indices(message) + "]");
@ -235,8 +237,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
", action=[_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [authentication_failed]\t" + origins +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
@ -256,18 +258,18 @@ public class LoggingAuditTrailTests extends ESTestCase {
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.authenticationFailed(new MockToken(), request);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
assertMsg(logger, Level.ERROR, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]");
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
assertMsg(logger, Level.DEBUG, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" +
expectedMessage + "]");
@ -283,18 +285,18 @@ public class LoggingAuditTrailTests extends ESTestCase {
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.authenticationFailed(request);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
assertMsg(logger, Level.ERROR, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri]");
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
assertMsg(logger, Level.DEBUG, prefix + "[rest] [authentication_failed]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]");
}
@ -304,19 +306,19 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAuthenticationFailedRealm() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
auditTrail.authenticationFailed("_realm", new MockToken(), "_action", message);
switch (level) {
case ERROR:
case WARN:
case INFO:
case DEBUG:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
case "DEBUG":
assertEmptyLog(logger);
break;
case TRACE:
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.TRACE, prefix + "[transport] [authentication_failed]\trealm=[_realm], " + origins +
", principal=[_principal], action=[_action], indices=[" + indices(message) + "], " +
@ -337,17 +339,17 @@ public class LoggingAuditTrailTests extends ESTestCase {
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.authenticationFailed("_realm", new MockToken(), request);
switch (level) {
case ERROR:
case WARN:
case INFO:
case DEBUG:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
case "DEBUG":
assertEmptyLog(logger);
break;
case TRACE:
case "TRACE":
assertMsg(logger, Level.TRACE, prefix + "[rest] [authentication_failed]\trealm=[_realm], origin_address=[" +
NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" +
expectedMessage + "]");
@ -358,7 +360,7 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAccessGranted() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
@ -372,12 +374,12 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]";
auditTrail.accessGranted(user, "_action", message);
switch (level) {
case ERROR:
case WARN:
switch (level.toString()) {
case "ERROR":
case "WARN":
assertEmptyLog(logger);
break;
case INFO:
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "]");
@ -386,8 +388,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
", action=[_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
@ -402,19 +404,19 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAccessGrantedInternalSystemAction() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
auditTrail.accessGranted(SystemUser.INSTANCE, "internal:_action", message);
switch (level) {
case ERROR:
case WARN:
case INFO:
case DEBUG:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
case "DEBUG":
assertEmptyLog(logger);
break;
case TRACE:
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.TRACE, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" +
SystemUser.INSTANCE.principal()
@ -430,7 +432,7 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAccessGrantedInternalSystemActionNonSystemUser() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
@ -444,12 +446,12 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]";
auditTrail.accessGranted(user, "internal:_action", message);
switch (level) {
case ERROR:
case WARN:
switch (level.toString()) {
case "ERROR":
case "WARN":
assertEmptyLog(logger);
break;
case INFO:
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[internal:_action], indices=[" + indices(message) + "]");
@ -458,8 +460,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
", action=[internal:_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo +
", action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
@ -474,7 +476,7 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testAccessDenied() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
@ -488,10 +490,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
String userInfo = runAs ? "principal=[running as], run_by_principal=[_username]" : "principal=[_username]";
auditTrail.accessDenied(user, "_action", message);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.ERROR, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "]");
@ -500,8 +502,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
", action=[_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
@ -522,18 +524,18 @@ public class LoggingAuditTrailTests extends ESTestCase {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.tamperedRequest(request);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
assertMsg(logger, Level.ERROR, prefix + "[rest] [tampered_request]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri]");
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
assertMsg(logger, Level.DEBUG, prefix + "[rest] [tampered_request]\torigin_address=[" +
NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]");
}
@ -546,13 +548,13 @@ public class LoggingAuditTrailTests extends ESTestCase {
threadContext = new ThreadContext(Settings.EMPTY);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.tamperedRequest(action, message);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.ERROR, prefix + "[transport] [tampered_request]\t" + origins +
", action=[_action], indices=[" + indices(message) + "]");
@ -560,8 +562,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
assertMsg(logger, Level.ERROR, prefix + "[transport] [tampered_request]\t" + origins + ", action=[_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [tampered_request]\t" + origins +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
@ -587,13 +589,13 @@ public class LoggingAuditTrailTests extends ESTestCase {
threadContext = new ThreadContext(Settings.EMPTY);
TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.tamperedRequest(user, action, message);
switch (level) {
case ERROR:
case WARN:
case INFO:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.ERROR, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "]");
@ -602,8 +604,8 @@ public class LoggingAuditTrailTests extends ESTestCase {
", action=[_action]");
}
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
if (message instanceof IndicesRequest) {
assertMsg(logger, Level.DEBUG, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo +
", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]");
@ -618,21 +620,21 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testConnectionDenied() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
InetAddress inetAddress = InetAddress.getLoopbackAddress();
SecurityIpFilterRule rule = new SecurityIpFilterRule(false, "_all");
auditTrail.connectionDenied(inetAddress, "default", rule);
switch (level) {
case ERROR:
switch (level.toString()) {
case "ERROR":
assertMsg(logger, Level.ERROR, String.format(Locale.ROOT, prefix +
"[ip_filter] [connection_denied]\torigin_address=[%s], transport_profile=[%s], rule=[deny %s]",
NetworkAddress.format(inetAddress), "default", "_all"));
break;
case WARN:
case INFO:
case DEBUG:
case TRACE:
case "WARN":
case "INFO":
case "DEBUG":
case "TRACE":
}
}
}
@ -640,19 +642,19 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testConnectionGranted() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
InetAddress inetAddress = InetAddress.getLoopbackAddress();
SecurityIpFilterRule rule = IPFilter.DEFAULT_PROFILE_ACCEPT_ALL;
auditTrail.connectionGranted(inetAddress, "default", rule);
switch (level) {
case ERROR:
case WARN:
case INFO:
case DEBUG:
switch (level.toString()) {
case "ERROR":
case "WARN":
case "INFO":
case "DEBUG":
assertEmptyLog(logger);
break;
case TRACE:
case "TRACE":
assertMsg(logger, Level.TRACE, String.format(Locale.ROOT, prefix + "[ip_filter] " +
"[connection_granted]\torigin_address=[%s], transport_profile=[default], rule=[allow default:accept_all]",
NetworkAddress.format(inetAddress)));
@ -663,23 +665,23 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testRunAsGranted() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = new MockMessage(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
User user = new User("_username", new String[]{"r1"}, new User("running as", new String[] {"r2"}));
auditTrail.runAsGranted(user, "_action", message);
switch (level) {
case ERROR:
case WARN:
switch (level.toString()) {
case "ERROR":
case "WARN":
assertEmptyLog(logger);
break;
case INFO:
case "INFO":
assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_granted]\t" + origins +
", principal=[_username], run_as_principal=[running as], action=[_action]");
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
assertMsg(logger, Level.DEBUG, prefix + "[transport] [run_as_granted]\t" + origins +
", principal=[_username], run_as_principal=[running as], action=[_action], request=[MockMessage]");
}
@ -689,23 +691,23 @@ public class LoggingAuditTrailTests extends ESTestCase {
public void testRunAsDenied() throws Exception {
for (Level level : Level.values()) {
threadContext = new ThreadContext(Settings.EMPTY);
CapturingLogger logger = new CapturingLogger(level);
Logger logger = CapturingLogger.newCapturingLogger(level);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
TransportMessage message = new MockMessage(threadContext);
String origins = LoggingAuditTrail.originAttributes(message, localNode, threadContext);
User user = new User("_username", new String[]{"r1"}, new User("running as", new String[] {"r2"}));
auditTrail.runAsDenied(user, "_action", message);
switch (level) {
case ERROR:
case WARN:
switch (level.toString()) {
case "ERROR":
case "WARN":
assertEmptyLog(logger);
break;
case INFO:
case "INFO":
assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_denied]\t" + origins +
", principal=[_username], run_as_principal=[running as], action=[_action]");
break;
case DEBUG:
case TRACE:
case "DEBUG":
case "TRACE":
assertMsg(logger, Level.DEBUG, prefix + "[transport] [run_as_denied]\t" + origins +
", principal=[_username], run_as_principal=[running as], action=[_action], request=[MockMessage]");
}
@ -736,14 +738,14 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
}
private void assertMsg(CapturingLogger logger, Level msgLevel, String msg) {
List<CapturingLogger.Msg> output = logger.output(msgLevel);
private void assertMsg(Logger logger, Level level, String message) {
List<String> output = CapturingLogger.output(logger.getName(), level);
assertThat(output.size(), is(1));
assertThat(output.get(0).text, equalTo(msg));
assertThat(output.get(0), equalTo(message));
}
private void assertEmptyLog(CapturingLogger logger) {
assertThat(logger.isEmpty(), is(true));
private void assertEmptyLog(Logger logger) {
assertThat(CapturingLogger.isEmpty(logger.getName()), is(true));
}
private String prepareRestContent(RestRequest mock) {
@ -825,4 +827,5 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
}
}
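One plausible reason for the switch-on-string changes throughout this test: unlike the removed CapturingLogger.Level enum, org.apache.logging.log4j.Level is an extensible class rather than a Java enum, so it cannot be used directly as a switch selector. A minimal sketch of the pattern, with a hypothetical describe helper:

import org.apache.logging.log4j.Level;

class LevelSwitchSketch {
    // Level is not a Java enum, so tests switch on its string form instead.
    static String describe(Level level) {
        switch (level.toString()) {
            case "ERROR":
            case "WARN":
                return "suppressed at this audit level";
            case "INFO":
            case "DEBUG":
            case "TRACE":
                return "emitted at this audit level";
            default:
                throw new IllegalArgumentException("unexpected level [" + level + "]");
        }
    }
}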

View File

@ -5,17 +5,20 @@
*/
package org.elasticsearch.xpack.security.authc.file;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
import org.elasticsearch.xpack.security.authc.support.SecuredStringTests;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.junit.After;
import org.junit.Before;
@ -40,9 +43,6 @@ import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
*
*/
public class FileUserPasswdStoreTests extends ESTestCase {
private Settings settings;
@ -173,17 +173,17 @@ public class FileUserPasswdStoreTests extends ESTestCase {
public void testParseFile_Empty() throws Exception {
Path empty = createTempFile();
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.DEBUG);
Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
Map<String, char[]> users = FileUserPasswdStore.parseFile(empty, logger);
assertThat(users.isEmpty(), is(true));
List<CapturingLogger.Msg> msgs = logger.output(CapturingLogger.Level.DEBUG);
assertThat(msgs.size(), is(1));
assertThat(msgs.get(0).text, containsString("parsed [0] users"));
List<String> events = CapturingLogger.output(logger.getName(), Level.DEBUG);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("parsed [0] users"));
}
public void testParseFile_WhenFileDoesNotExist() throws Exception {
Path file = createTempDir().resolve(randomAsciiOfLength(10));
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<String, char[]> users = FileUserPasswdStore.parseFile(file, logger);
assertThat(users, notNullValue());
assertThat(users.isEmpty(), is(true));
@ -193,7 +193,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {
Path file = createTempFile();
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
try {
FileUserPasswdStore.parseFile(file, logger);
fail("expected a parse failure");
@ -214,13 +214,13 @@ public class FileUserPasswdStoreTests extends ESTestCase {
Path file = createTempFile();
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<String, char[]> users = FileUserPasswdStore.parseFileLenient(file, logger);
assertThat(users, notNullValue());
assertThat(users.isEmpty(), is(true));
List<CapturingLogger.Msg> msgs = logger.output(CapturingLogger.Level.ERROR);
assertThat(msgs.size(), is(1));
assertThat(msgs.get(0).text, containsString("failed to parse users file"));
List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("failed to parse users file"));
}
public void testParseFileWithLineWithEmptyPasswordAndWhitespace() throws Exception {
@ -230,4 +230,5 @@ public class FileUserPasswdStoreTests extends ESTestCase {
assertThat(users, notNullValue());
assertThat(users.keySet(), is(empty()));
}
}

View File

@ -5,19 +5,21 @@
*/
package org.elasticsearch.xpack.security.authc.file;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger.Level;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger;
import org.elasticsearch.xpack.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.authc.support.RefreshListener;
import org.junit.After;
import org.junit.Before;
@ -44,6 +46,7 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
public class FileUserRolesStoreTests extends ESTestCase {
private Settings settings;
private Environment env;
private ThreadPool threadPool;
@ -182,16 +185,16 @@ public class FileUserRolesStoreTests extends ESTestCase {
public void testParseFileEmpty() throws Exception {
Path empty = createTempFile();
CapturingLogger log = new CapturingLogger(Level.DEBUG);
Logger log = CapturingLogger.newCapturingLogger(Level.DEBUG);
FileUserRolesStore.parseFile(empty, log);
List<CapturingLogger.Msg> msgs = log.output(CapturingLogger.Level.DEBUG);
assertThat(msgs.size(), is(1));
assertThat(msgs.get(0).text, containsString("parsed [0] user to role mappings"));
List<String> events = CapturingLogger.output(log.getName(), Level.DEBUG);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("parsed [0] user to role mappings"));
}
public void testParseFileWhenFileDoesNotExist() throws Exception {
Path file = createTempDir().resolve(randomAsciiOfLength(10));
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<String, String[]> usersRoles = FileUserRolesStore.parseFile(file, logger);
assertThat(usersRoles, notNullValue());
assertThat(usersRoles.isEmpty(), is(true));
@ -204,7 +207,7 @@ public class FileUserRolesStoreTests extends ESTestCase {
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, lines, StandardCharsets.UTF_16);
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
try {
FileUserRolesStore.parseFile(file, logger);
fail("expected a parse failure");
@ -261,13 +264,13 @@ public class FileUserRolesStoreTests extends ESTestCase {
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, lines, StandardCharsets.UTF_16);
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
Map<String, String[]> usersRoles = FileUserRolesStore.parseFileLenient(file, logger);
assertThat(usersRoles, notNullValue());
assertThat(usersRoles.isEmpty(), is(true));
List<CapturingLogger.Msg> msgs = logger.output(CapturingLogger.Level.ERROR);
assertThat(msgs.size(), is(1));
assertThat(msgs.get(0).text, containsString("failed to parse users_roles file"));
List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("failed to parse users_roles file"));
}
private Path writeUsersRoles(String input) throws Exception {
@ -289,4 +292,5 @@ public class FileUserRolesStoreTests extends ESTestCase {
String reason = String.format(Locale.ROOT, "Expected userRoles to be empty, but was %s", usersRoles.keySet());
assertThat(reason, usersRoles.keySet(), hasSize(0));
}
}

View File

@ -6,6 +6,9 @@
package org.elasticsearch.xpack.security.authc.support;
import com.unboundid.ldap.sdk.DN;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger;
@ -166,7 +169,7 @@ public class DnRoleMapperTests extends ESTestCase {
public void testParseFile() throws Exception {
Path file = getDataPath("role_mapping.yml");
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name");
assertThat(mappings, notNullValue());
assertThat(mappings.size(), is(3));
@ -196,18 +199,18 @@ public class DnRoleMapperTests extends ESTestCase {
public void testParseFile_Empty() throws Exception {
Path file = createTempDir().resolve("foo.yaml");
Files.createFile(file);
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.DEBUG);
Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG);
Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name");
assertThat(mappings, notNullValue());
assertThat(mappings.isEmpty(), is(true));
List<CapturingLogger.Msg> msgs = logger.output(CapturingLogger.Level.DEBUG);
assertThat(msgs.size(), is(1));
assertThat(msgs.get(0).text, containsString("[0] role mappings found"));
List<String> events = CapturingLogger.output(logger.getName(), Level.DEBUG);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("[0] role mappings found"));
}
public void testParseFile_WhenFileDoesNotExist() throws Exception {
Path file = createTempDir().resolve(randomAsciiOfLength(10));
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name");
assertThat(mappings, notNullValue());
assertThat(mappings.isEmpty(), is(true));
@ -217,7 +220,7 @@ public class DnRoleMapperTests extends ESTestCase {
Path file = createTempFile();
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
try {
DnRoleMapper.parseFile(file, logger, "_type", "_name");
fail("expected a parse failure");
@ -230,13 +233,13 @@ public class DnRoleMapperTests extends ESTestCase {
Path file = createTempFile();
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<DN, Set<String>> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name");
assertThat(mappings, notNullValue());
assertThat(mappings.isEmpty(), is(true));
List<CapturingLogger.Msg> msgs = logger.output(CapturingLogger.Level.ERROR);
assertThat(msgs.size(), is(1));
assertThat(msgs.get(0).text, containsString("failed to parse role mappings file"));
List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
assertThat(events.size(), is(1));
assertThat(events.get(0), containsString("failed to parse role mappings file"));
}
public void testYaml() throws Exception {

View File

@ -5,6 +5,9 @@
*/
package org.elasticsearch.xpack.security.authz.store;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.XPackPlugin;
@ -48,9 +51,6 @@ import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.mockito.Mockito.mock;
/**
*
*/
public class FileRolesStoreTests extends ESTestCase {
public void testParseFile() throws Exception {
@ -207,7 +207,7 @@ public class FileRolesStoreTests extends ESTestCase {
public void testParseFileWithFLSAndDLSDisabled() throws Exception {
Path path = getDataPath("roles.yml");
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.ERROR);
Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR);
Map<String, Role> roles = FileRolesStore.parseFile(path, logger, Settings.builder()
.put(XPackSettings.DLS_FLS_ENABLED.getKey(), false)
.build());
@ -217,14 +217,18 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(roles.get("role_query"), nullValue());
assertThat(roles.get("role_query_fields"), nullValue());
List<CapturingLogger.Msg> entries = logger.output(CapturingLogger.Level.ERROR);
assertThat(entries, hasSize(3));
assertThat(entries.get(0).text, startsWith("invalid role definition [role_fields] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
assertThat(entries.get(1).text, startsWith("invalid role definition [role_query] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
assertThat(entries.get(2).text, startsWith("invalid role definition [role_query_fields] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
assertThat(events, hasSize(3));
assertThat(
events.get(0),
startsWith("invalid role definition [role_fields] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
assertThat(events.get(1),
startsWith("invalid role definition [role_query] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
assertThat(events.get(2),
startsWith("invalid role definition [role_query_fields] in roles file [" + path.toAbsolutePath() +
"]. document and field level security is not enabled."));
}
/**
@ -310,7 +314,7 @@ public class FileRolesStoreTests extends ESTestCase {
public void testThatInvalidRoleDefinitions() throws Exception {
Path path = getDataPath("invalid_roles.yml");
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.ERROR);
Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR);
Map<String, Role> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY);
assertThat(roles.size(), is(1));
assertThat(roles, hasKey("valid_role"));
@ -318,33 +322,37 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(role, notNullValue());
assertThat(role.name(), equalTo("valid_role"));
List<CapturingLogger.Msg> entries = logger.output(CapturingLogger.Level.ERROR);
List<String> entries = CapturingLogger.output(logger.getName(), Level.ERROR);
assertThat(entries, hasSize(6));
assertThat(entries.get(0).text, startsWith("invalid role definition [$dlk39] in roles file [" + path.toAbsolutePath() +
"]. invalid role name"));
assertThat(entries.get(1).text, startsWith("invalid role definition [role1] in roles file [" + path.toAbsolutePath() + "]"));
assertThat(entries.get(2).text, startsWith("failed to parse role [role2]"));
assertThat(entries.get(3).text, startsWith("failed to parse role [role3]"));
assertThat(entries.get(4).text, startsWith("failed to parse role [role4]"));
assertThat(entries.get(5).text, startsWith("failed to parse indices privileges for role [role5]"));
assertThat(
entries.get(0),
startsWith("invalid role definition [$dlk39] in roles file [" + path.toAbsolutePath() + "]. invalid role name"));
assertThat(
entries.get(1),
startsWith("invalid role definition [role1] in roles file [" + path.toAbsolutePath() + "]"));
assertThat(entries.get(2), startsWith("failed to parse role [role2]"));
assertThat(entries.get(3), startsWith("failed to parse role [role3]"));
assertThat(entries.get(4), startsWith("failed to parse role [role4]"));
assertThat(entries.get(5), startsWith("failed to parse indices privileges for role [role5]"));
}
public void testThatRoleNamesDoesNotResolvePermissions() throws Exception {
Path path = getDataPath("invalid_roles.yml");
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.ERROR);
Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR);
Set<String> roleNames = FileRolesStore.parseFileForRoleNames(path, logger);
assertThat(roleNames.size(), is(6));
assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5"));
List<CapturingLogger.Msg> entries = logger.output(CapturingLogger.Level.ERROR);
assertThat(entries, hasSize(1));
assertThat(entries.get(0).text, startsWith("invalid role definition [$dlk39] in roles file [" + path.toAbsolutePath() +
"]. invalid role name"));
List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
assertThat(events, hasSize(1));
assertThat(
events.get(0),
startsWith("invalid role definition [$dlk39] in roles file [" + path.toAbsolutePath() + "]. invalid role name"));
}
public void testReservedRoles() throws Exception {
CapturingLogger logger = new CapturingLogger(CapturingLogger.Level.INFO);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Path path = getDataPath("reserved_roles.yml");
Map<String, Role> roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY);
@ -353,14 +361,14 @@ public class FileRolesStoreTests extends ESTestCase {
assertThat(roles, hasKey("admin"));
List<CapturingLogger.Msg> messages = logger.output(CapturingLogger.Level.WARN);
assertThat(messages, notNullValue());
assertThat(messages, hasSize(4));
List<String> events = CapturingLogger.output(logger.getName(), Level.WARN);
assertThat(events, notNullValue());
assertThat(events, hasSize(4));
// the system role will always be checked first
assertThat(messages.get(0).text, containsString("role [_system] is reserved"));
assertThat(messages.get(1).text, containsString("role [superuser] is reserved"));
assertThat(messages.get(2).text, containsString("role [kibana] is reserved"));
assertThat(messages.get(3).text, containsString("role [transport_client] is reserved"));
assertThat(events.get(0), containsString("role [_system] is reserved"));
assertThat(events.get(1), containsString("role [superuser] is reserved"));
assertThat(events.get(2), containsString("role [kibana] is reserved"));
assertThat(events.get(3), containsString("role [transport_client] is reserved"));
}
public void testUsageStats() throws Exception {

View File

@ -5,8 +5,6 @@
*/
package org.elasticsearch.xpack.extensions;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.varia.NullAppender;
import org.elasticsearch.cli.MultiCommand;
import org.elasticsearch.cli.Terminal;
@ -15,7 +13,7 @@ import org.elasticsearch.cli.Terminal;
*/
public class XPackExtensionCli extends MultiCommand {
public XPackExtensionCli() {
private XPackExtensionCli() {
super("A tool for managing installed x-pack extensions");
subcommands.put("list", new ListXPackExtensionCommand());
subcommands.put("install", new InstallXPackExtensionCommand());
@ -23,7 +21,6 @@ public class XPackExtensionCli extends MultiCommand {
}
public static void main(String[] args) throws Exception {
BasicConfigurator.configure(new NullAppender());
exit(new XPackExtensionCli().main(args, Terminal.DEFAULT));
}

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.extensions;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
@ -19,11 +19,11 @@ import java.net.URLClassLoader;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.stream.Collectors;
import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
@ -84,7 +84,7 @@ public class XPackExtensionsService {
}
static List<Bundle> getExtensionBundles(Path extsDirectory) throws IOException {
ESLogger logger = Loggers.getLogger(XPackExtensionsService.class);
Logger logger = Loggers.getLogger(XPackExtensionsService.class);
// TODO: remove this leniency, but tests bogusly rely on it
if (!isAccessibleDirectory(extsDirectory, logger)) {

View File

@ -5,6 +5,13 @@
*/
package org.elasticsearch.xpack.notification.email;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import javax.activation.CommandMap;
import javax.activation.MailcapCommandMap;
import javax.mail.MessagingException;
@ -17,13 +24,6 @@ import java.security.PrivilegedAction;
import java.util.Map;
import java.util.Properties;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.security.crypto.CryptoService;
/**
*
*/
@ -63,10 +63,10 @@ public class Account {
private final Config config;
private final CryptoService cryptoService;
private final ESLogger logger;
private final Logger logger;
private final Session session;
Account(Config config, CryptoService cryptoService, ESLogger logger) {
Account(Config config, CryptoService cryptoService, Logger logger) {
this.config = config;
this.cryptoService = cryptoService;
this.logger = logger;

View File

@ -5,14 +5,14 @@
*/
package org.elasticsearch.xpack.notification.email;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import java.util.HashMap;
import java.util.Map;
/**
*
*/
@ -21,7 +21,7 @@ public class Accounts {
private final String defaultAccountName;
private final Map<String, Account> accounts;
public Accounts(Settings settings, CryptoService cryptoService, ESLogger logger) {
public Accounts(Settings settings, CryptoService cryptoService, Logger logger) {
Settings accountsSettings = settings.getAsSettings("account");
accounts = new HashMap<>();
for (String name : accountsSettings.names()) {

View File

@ -5,17 +5,16 @@
*/
package org.elasticsearch.xpack.notification.email;
import javax.mail.MessagingException;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import javax.mail.MessagingException;
/**
* A component to store email credentials and handle sending email notifications.
*/
@ -62,7 +61,7 @@ public class EmailService extends AbstractComponent {
return new EmailSent(account.name(), email);
}
protected Accounts createAccounts(Settings settings, ESLogger logger) {
protected Accounts createAccounts(Settings settings, Logger logger) {
return new Accounts(settings, cryptoService, logger);
}

View File

@ -5,16 +5,15 @@
*/
package org.elasticsearch.xpack.notification.email.attachment;
import java.io.IOException;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.common.http.HttpClient;
@ -27,6 +26,9 @@ import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.watcher.watch.Payload;
import java.io.IOException;
import java.util.Map;
public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpRequestAttachment> {
public interface Fields {
@ -39,7 +41,7 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpReq
private final HttpClient httpClient;
private HttpRequestTemplate.Parser requestTemplateParser;
private final TextTemplateEngine templateEngine;
private final ESLogger logger;
private final Logger logger;
public HttpEmailAttachementParser(HttpClient httpClient, HttpRequestTemplate.Parser requestTemplateParser,
TextTemplateEngine templateEngine) {
@ -108,8 +110,14 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpReq
httpRequest.host(), httpRequest.port(), httpRequest.method(), httpRequest.path(), response.status());
}
} catch (IOException e) {
logger.error("Error executing HTTP request: [host[{}], port[{}], method[{}], path[{}]: [{}]", e, httpRequest.host(),
httpRequest.port(), httpRequest.method(), httpRequest.path(), e.getMessage());
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
"Error executing HTTP request: [host[{}], port[{}], method[{}], path[{}]",
httpRequest.host(),
httpRequest.port(),
httpRequest.method(),
httpRequest.path()),
e);
}
throw new ElasticsearchException("Unable to get attachment of type [{}] with id [{}] in watch [{}] aborting watch execution",
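
The recurring change in these hunks is the logging call style: the old ESLogger varargs form took the exception in the middle of the argument list, while the Log4j 2 Logger takes a lazily evaluated message supplier plus the Throwable as the last argument, so the message is only rendered when the level is enabled. A minimal, self-contained sketch of the new idiom follows; the class, method, and field names are illustrative, not taken from the diff.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

class LazyLoggingSketch {
    private static final Logger logger = LogManager.getLogger(LazyLoggingSketch.class);

    void onRequestFailure(String host, int port, Exception e) {
        // Old ESLogger style (the removed line above): logger.error("request to [{}:{}] failed", e, host, port);
        // Log4j 2 style: the ParameterizedMessage is only built if ERROR is enabled,
        // and the exception is passed last. The (Supplier<?>) cast steers the call to
        // the Supplier-based overload rather than the MessageSupplier one.
        logger.error(
                (Supplier<?>) () -> new ParameterizedMessage("request to [{}:{}] failed", host, port), e);
    }
}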

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.ToXContent;
@ -32,7 +32,7 @@ public abstract class HipChatAccount {
public static final String DEFAULT_COLOR_SETTING = "message_defaults." + HipChatMessage.Field.COLOR.getPreferredName();
public static final String DEFAULT_NOTIFY_SETTING = "message_defaults." + HipChatMessage.Field.NOTIFY.getPreferredName();
protected final ESLogger logger;
protected final Logger logger;
protected final String name;
protected final Profile profile;
protected final HipChatServer server;
@ -40,7 +40,7 @@ public abstract class HipChatAccount {
protected final String authToken;
protected HipChatAccount(String name, Profile profile, Settings settings, HipChatServer defaultServer, HttpClient httpClient,
ESLogger logger) {
Logger logger) {
this.name = name;
this.profile = profile;
this.server = new HipChatServer(settings, defaultServer);
@ -66,27 +66,27 @@ public abstract class HipChatAccount {
V1() {
@Override
HipChatAccount createAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient,
ESLogger logger) {
Logger logger) {
return new V1Account(name, settings, defaultServer, httpClient, logger);
}
},
INTEGRATION() {
@Override
HipChatAccount createAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient,
ESLogger logger) {
Logger logger) {
return new IntegrationAccount(name, settings, defaultServer, httpClient, logger);
}
},
USER() {
@Override
HipChatAccount createAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient,
ESLogger logger) {
Logger logger) {
return new UserAccount(name, settings, defaultServer, httpClient, logger);
}
};
abstract HipChatAccount createAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient,
ESLogger logger);
Logger logger);
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.xpack.notification.hipchat.HipChatAccount.Profile;
import org.elasticsearch.xpack.common.http.HttpClient;
import org.elasticsearch.xpack.notification.hipchat.HipChatAccount.Profile;
import java.util.HashMap;
import java.util.Map;
@ -22,7 +22,7 @@ public class HipChatAccounts {
private final Map<String, HipChatAccount> accounts;
private final String defaultAccountName;
public HipChatAccounts(Settings settings, HttpClient httpClient, ESLogger logger) {
public HipChatAccounts(Settings settings, HttpClient httpClient, Logger logger) {
HipChatServer defaultServer = new HipChatServer(settings);
Settings accountsSettings = settings.getAsSettings("account");
accounts = new HashMap<>();

View File

@ -5,24 +5,24 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Color;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Format;
import org.elasticsearch.xpack.common.http.HttpClient;
import org.elasticsearch.xpack.common.http.HttpMethod;
import org.elasticsearch.xpack.common.http.HttpRequest;
import org.elasticsearch.xpack.common.http.HttpResponse;
import org.elasticsearch.xpack.common.http.Scheme;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Color;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Format;
import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction;
import java.io.IOException;
import java.util.ArrayList;
@ -39,7 +39,7 @@ public class IntegrationAccount extends HipChatAccount {
final String room;
final Defaults defaults;
public IntegrationAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient, ESLogger logger) {
public IntegrationAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient, Logger logger) {
super(name, Profile.INTEGRATION, settings, defaultServer, httpClient, logger);
String[] rooms = settings.getAsArray(ROOM_SETTING, null);
if (rooms == null || rooms.length == 0) {

View File

@ -5,24 +5,24 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Color;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Format;
import org.elasticsearch.xpack.common.http.HttpClient;
import org.elasticsearch.xpack.common.http.HttpMethod;
import org.elasticsearch.xpack.common.http.HttpRequest;
import org.elasticsearch.xpack.common.http.HttpResponse;
import org.elasticsearch.xpack.common.http.Scheme;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Color;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Format;
import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction;
import java.io.IOException;
import java.util.ArrayList;
@ -38,7 +38,7 @@ public class UserAccount extends HipChatAccount {
final Defaults defaults;
public UserAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient, ESLogger logger) {
public UserAccount(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient, Logger logger) {
super(name, Profile.USER, settings, defaultServer, httpClient, logger);
defaults = new Defaults(settings);
}

View File

@ -5,20 +5,20 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Color;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Format;
import org.elasticsearch.xpack.common.http.HttpClient;
import org.elasticsearch.xpack.common.http.HttpMethod;
import org.elasticsearch.xpack.common.http.HttpRequest;
import org.elasticsearch.xpack.common.http.HttpResponse;
import org.elasticsearch.xpack.common.http.Scheme;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Color;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage.Format;
import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction;
import java.util.ArrayList;
import java.util.List;
@ -33,7 +33,7 @@ public class V1Account extends HipChatAccount {
final Defaults defaults;
public V1Account(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient, ESLogger logger) {
public V1Account(String name, Settings settings, HipChatServer defaultServer, HttpClient httpClient, Logger logger) {
super(name, Profile.V1, settings, defaultServer, httpClient, logger);
defaults = new Defaults(settings);
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.notification.pagerduty;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.xpack.common.http.HttpClient;
@ -27,9 +27,9 @@ public class PagerDutyAccount {
final String serviceKey;
final HttpClient httpClient;
final IncidentEventDefaults eventDefaults;
final ESLogger logger;
final Logger logger;
public PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient, ESLogger logger) {
public PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient, Logger logger) {
this.name = name;
this.serviceKey = accountSettings.get(SERVICE_KEY_SETTING, serviceSettings.get(SERVICE_KEY_SETTING, null));
if (this.serviceKey == null) {

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.notification.pagerduty;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.xpack.common.http.HttpClient;
@ -21,7 +21,7 @@ public class PagerDutyAccounts {
private final Map<String, PagerDutyAccount> accounts;
private final String defaultAccountName;
public PagerDutyAccounts(Settings serviceSettings, HttpClient httpClient, ESLogger logger) {
public PagerDutyAccounts(Settings serviceSettings, HttpClient httpClient, Logger logger) {
Settings accountsSettings = serviceSettings.getAsSettings("account");
accounts = new HashMap<>();
for (String name : accountsSettings.names()) {

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.notification.slack;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.ToXContent;
@ -37,10 +37,10 @@ public class SlackAccount {
final String name;
final URI url;
final HttpClient httpClient;
final ESLogger logger;
final Logger logger;
final SlackMessageDefaults messageDefaults;
public SlackAccount(String name, Settings settings, Settings defaultSettings, HttpClient httpClient, ESLogger logger) {
public SlackAccount(String name, Settings settings, Settings defaultSettings, HttpClient httpClient, Logger logger) {
this.name = name;
this.url = url(name, settings, defaultSettings);
this.messageDefaults = new SlackMessageDefaults(settings.getAsSettings(MESSAGE_DEFAULTS_SETTING));

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.notification.slack;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.xpack.common.http.HttpClient;
@ -21,7 +21,7 @@ public class SlackAccounts {
private final Map<String, SlackAccount> accounts;
private final String defaultAccountName;
public SlackAccounts(Settings settings, HttpClient httpClient, ESLogger logger) {
public SlackAccounts(Settings settings, HttpClient httpClient, Logger logger) {
Settings accountsSettings = settings.getAsSettings("account");
accounts = new HashMap<>();
for (String name : accountsSettings.names()) {

View File

@ -5,12 +5,11 @@
*/
package org.elasticsearch.xpack.notification.email;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.common.secret.Secret;
import org.junit.After;
import org.junit.Before;
import java.util.Collections;
@ -32,7 +31,7 @@ public class EmailServiceTests extends ESTestCase {
service = new EmailService(Settings.EMPTY, null,
new ClusterSettings(Settings.EMPTY, Collections.singleton(EmailService.EMAIL_ACCOUNT_SETTING))) {
@Override
protected Accounts createAccounts(Settings settings, ESLogger logger) {
protected Accounts createAccounts(Settings settings, Logger logger) {
return accounts;
}
};

View File

@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.notification.email.support;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.transport.PortsRange;
import org.subethamail.smtp.TooMuchDataException;
import org.subethamail.smtp.auth.EasyAuthenticationHandlerFactory;
@ -42,7 +44,7 @@ public class EmailServer {
private final SMTPServer server;
public EmailServer(String host, int port, final String username, final String password, final ESLogger logger) {
public EmailServer(String host, int port, final String username, final String password, final Logger logger) {
server = new SMTPServer(new SimpleMessageListenerAdapter(new SimpleMessageListener() {
@Override
public boolean accept(String from, String recipient) {
@ -98,7 +100,7 @@ public class EmailServer {
return new Listener.Handle(listeners, listener);
}
public static EmailServer localhost(String portRangeStr, final String username, final String password, final ESLogger logger) {
public static EmailServer localhost(String portRangeStr, final String username, final String password, final Logger logger) {
final AtomicReference<EmailServer> emailServer = new AtomicReference<>();
boolean bound = new PortsRange(portRangeStr).iterate(new PortsRange.PortCallback() {
@Override
@ -110,7 +112,8 @@ public class EmailServer {
return true;
} catch (RuntimeException re) {
if (re.getCause() instanceof BindException) {
logger.warn("port [{}] was already in use trying next port", re, port);
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage("port [{}] was already in use trying next port", port), re);
return false;
} else {
throw re;

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.ToXContent;
@ -71,7 +71,7 @@ public class IntegrationAccountTests extends ESTestCase {
Settings settings = sb.build();
IntegrationAccount account = new IntegrationAccount(accountName, settings, HipChatServer.DEFAULT, mock(HttpClient.class),
mock(ESLogger.class));
mock(Logger.class));
assertThat(account.profile, is(HipChatAccount.Profile.INTEGRATION));
assertThat(account.name, equalTo(accountName));
@ -88,7 +88,7 @@ public class IntegrationAccountTests extends ESTestCase {
Settings.Builder sb = Settings.builder();
sb.put(IntegrationAccount.ROOM_SETTING, randomAsciiOfLength(10));
try {
new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(ESLogger.class));
new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class));
fail("Expected SettingsException");
} catch (SettingsException e) {
assertThat(e.getMessage(), is("hipchat account [_name] missing required [auth_token] setting"));
@ -99,7 +99,7 @@ public class IntegrationAccountTests extends ESTestCase {
Settings.Builder sb = Settings.builder();
sb.put(IntegrationAccount.AUTH_TOKEN_SETTING, randomAsciiOfLength(50));
try {
new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(ESLogger.class));
new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class));
fail("Expected SettingsException");
} catch (SettingsException e) {
assertThat(e.getMessage(), containsString("missing required [room] setting for [integration] account profile"));
@ -111,7 +111,7 @@ public class IntegrationAccountTests extends ESTestCase {
sb.put(IntegrationAccount.AUTH_TOKEN_SETTING, randomAsciiOfLength(50));
sb.put(IntegrationAccount.ROOM_SETTING, "_r1,_r2");
try {
new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(ESLogger.class));
new IntegrationAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class));
fail("Expected SettingsException");
} catch (SettingsException e) {
assertThat(e.getMessage(), containsString("[room] setting for [integration] account must only be set with a single value"));
@ -125,7 +125,7 @@ public class IntegrationAccountTests extends ESTestCase {
.put("port", "443")
.put("auth_token", "_token")
.put("room", "_room")
.build(), HipChatServer.DEFAULT, httpClient, mock(ESLogger.class));
.build(), HipChatServer.DEFAULT, httpClient, mock(Logger.class));
HipChatMessage.Format format = randomFrom(HipChatMessage.Format.values());
HipChatMessage.Color color = randomFrom(HipChatMessage.Color.values());

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.ToXContent;
@ -83,7 +83,7 @@ public class UserAccountTests extends ESTestCase {
}
Settings settings = sb.build();
UserAccount account = new UserAccount(accountName, settings, HipChatServer.DEFAULT, mock(HttpClient.class), mock(ESLogger.class));
UserAccount account = new UserAccount(accountName, settings, HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class));
assertThat(account.profile, is(HipChatAccount.Profile.USER));
assertThat(account.name, equalTo(accountName));
@ -108,7 +108,7 @@ public class UserAccountTests extends ESTestCase {
public void testSettingsNoAuthToken() throws Exception {
Settings.Builder sb = Settings.builder();
try {
new UserAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(ESLogger.class));
new UserAccount("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class));
fail("Expected SettingsException");
} catch (SettingsException e) {
assertThat(e.getMessage(), is("hipchat account [_name] missing required [auth_token] setting"));
@ -121,7 +121,7 @@ public class UserAccountTests extends ESTestCase {
.put("host", "_host")
.put("port", "443")
.put("auth_token", "_token")
.build(), HipChatServer.DEFAULT, httpClient, mock(ESLogger.class));
.build(), HipChatServer.DEFAULT, httpClient, mock(Logger.class));
HipChatMessage.Format format = randomFrom(HipChatMessage.Format.values());
HipChatMessage.Color color = randomFrom(HipChatMessage.Color.values());

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.notification.hipchat;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.test.ESTestCase;
@ -74,7 +74,7 @@ public class V1AccountTests extends ESTestCase {
}
Settings settings = sb.build();
V1Account account = new V1Account(accountName, settings, HipChatServer.DEFAULT, mock(HttpClient.class), mock(ESLogger.class));
V1Account account = new V1Account(accountName, settings, HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class));
assertThat(account.profile, is(HipChatAccount.Profile.V1));
assertThat(account.name, equalTo(accountName));
@ -95,7 +95,7 @@ public class V1AccountTests extends ESTestCase {
public void testSettingsNoAuthToken() throws Exception {
Settings.Builder sb = Settings.builder();
try {
new V1Account("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(ESLogger.class));
new V1Account("_name", sb.build(), HipChatServer.DEFAULT, mock(HttpClient.class), mock(Logger.class));
fail("Expected SettingsException");
} catch (SettingsException e) {
assertThat(e.getMessage(), is("hipchat account [_name] missing required [auth_token] setting"));
@ -108,7 +108,7 @@ public class V1AccountTests extends ESTestCase {
.put("host", "_host")
.put("port", "443")
.put("auth_token", "_token")
.build(), HipChatServer.DEFAULT, httpClient, mock(ESLogger.class));
.build(), HipChatServer.DEFAULT, httpClient, mock(Logger.class));
HipChatMessage.Format format = randomFrom(HipChatMessage.Format.values());
HipChatMessage.Color color = randomFrom(HipChatMessage.Color.values());
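
In the test hunks above, every mock(ESLogger.class) becomes mock(Logger.class); because org.apache.logging.log4j.Logger is an interface, Mockito can mock it directly. A minimal sketch of the pattern under that assumption, using a stand-in component rather than any of the account classes from the diff:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.apache.logging.log4j.Logger;

public class LoggerMockingSketchTests {

    // A tiny stand-in for the account classes above: it only takes a Log4j 2 Logger
    // through its constructor and logs through it.
    static class NotifyingAccount {
        private final Logger logger;

        NotifyingAccount(Logger logger) {
            this.logger = logger;
        }

        void send(String message) {
            logger.debug("sending [{}]", message);
        }
    }

    public void testLoggerIsMockable() {
        Logger logger = mock(Logger.class);
        new NotifyingAccount(logger).send("hello");
        verify(logger).debug("sending [{}]", "hello");
    }
}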

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.watcher;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.Client;
@ -12,7 +13,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.regex.Regex;
@ -96,7 +96,7 @@ public class Watcher implements ActionPlugin, ScriptPlugin {
private static final ScriptContext.Plugin SCRIPT_PLUGIN = new ScriptContext.Plugin("xpack", "watch");
public static final ScriptContext SCRIPT_CONTEXT = SCRIPT_PLUGIN::getKey;
private static final ESLogger logger = Loggers.getLogger(XPackPlugin.class);
private static final Logger logger = Loggers.getLogger(XPackPlugin.class);
static {
MetaData.registerPrototype(WatcherMetaData.TYPE, WatcherMetaData.PROTO);
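
The static logger fields keep the same Elasticsearch Loggers helper; only the declared type changes from the removed ESLogger wrapper to the Log4j 2 Logger interface. A minimal sketch of the new declaration, assuming the Loggers.getLogger(Class) helper used in the hunk above (the class name is illustrative):

import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;

class PluginComponentSketch {
    // Same factory call as before; it now returns org.apache.logging.log4j.Logger directly.
    private static final Logger logger = Loggers.getLogger(PluginComponentSketch.class);
}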

View File

@ -5,14 +5,16 @@
*/
package org.elasticsearch.xpack.watcher;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ack.AckedRequest;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.component.LifecycleListener;
import org.elasticsearch.common.inject.Inject;
@ -201,7 +203,7 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste
@Override
public void onFailure(String source, Exception throwable) {
latch.countDown();
logger.warn("couldn't update watcher metadata [{}]", throwable, source);
logger.warn((Supplier<?>) () -> new ParameterizedMessage("couldn't update watcher metadata [{}]", source), throwable);
}
});
try {

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.actions;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@ -15,9 +15,9 @@ import java.io.IOException;
*/
public abstract class ActionFactory<A extends Action, E extends ExecutableAction<A>> {
protected final ESLogger actionLogger;
protected final Logger actionLogger;
protected ActionFactory(ESLogger actionLogger) {
protected ActionFactory(Logger actionLogger) {
this.actionLogger = actionLogger;
}

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.watcher.actions;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
@ -15,6 +17,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.actions.throttler.ActionThrottler;
import org.elasticsearch.xpack.watcher.actions.throttler.Throttler;
import org.elasticsearch.xpack.watcher.condition.Condition;
@ -22,7 +25,6 @@ import org.elasticsearch.xpack.watcher.condition.ConditionRegistry;
import org.elasticsearch.xpack.watcher.condition.ExecutableCondition;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;
import org.elasticsearch.xpack.watcher.transform.Transform;
import org.elasticsearch.xpack.watcher.transform.TransformRegistry;
@ -113,7 +115,9 @@ public class ActionWrapper implements ToXContent {
new Action.Result.ConditionFailed(action.type(), "condition not met. skipping"));
}
} catch (RuntimeException e) {
action.logger().error("failed to execute action [{}/{}]. failed to execute condition", e, ctx.watch().id(), id);
action.logger().error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to execute action [{}/{}]. failed to execute condition", ctx.watch().id(), id), e);
return new ActionWrapper.Result(id, new Action.Result.ConditionFailed(action.type(),
"condition failed. skipping: {}", e.getMessage()));
}
@ -131,7 +135,9 @@ public class ActionWrapper implements ToXContent {
}
payload = transformResult.payload();
} catch (Exception e) {
action.logger().error("failed to execute action [{}/{}]. failed to transform payload.", e, ctx.watch().id(), id);
action.logger().error(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to execute action [{}/{}]. failed to transform payload.", ctx.watch().id(), id), e);
return new ActionWrapper.Result(id, conditionResult, null,
new Action.Result.Failure(action.type(), "Failed to transform payload. error: {}",
ExceptionsHelper.detailedMessage(e)));
@ -141,7 +147,8 @@ public class ActionWrapper implements ToXContent {
Action.Result actionResult = action.execute(id, ctx, payload);
return new ActionWrapper.Result(id, conditionResult, transformResult, actionResult);
} catch (Exception e) {
action.logger().error("failed to execute action [{}/{}]", e, ctx.watch().id(), id);
action.logger().error(
(Supplier<?>) () -> new ParameterizedMessage("failed to execute action [{}/{}]", ctx.watch().id(), id), e);
return new ActionWrapper.Result(id, new Action.Result.Failure(action.type(), ExceptionsHelper.detailedMessage(e)));
}
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.actions;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
@ -18,9 +18,9 @@ import java.io.IOException;
public abstract class ExecutableAction<A extends Action> implements ToXContent {
protected final A action;
protected final ESLogger logger;
protected final Logger logger;
protected ExecutableAction(A action, ESLogger logger) {
protected ExecutableAction(A action, Logger logger) {
this.action = action;
this.logger = logger;
}
@ -39,7 +39,7 @@ public abstract class ExecutableAction<A extends Action> implements ToXContent {
/**
* yack... needed to expose that for testing purposes
*/
public ESLogger logger() {
public Logger logger() {
return logger;
}

View File

@ -5,20 +5,20 @@
*/
package org.elasticsearch.xpack.watcher.actions.email;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.watcher.watch.Payload;
import org.elasticsearch.xpack.notification.email.Attachment;
import org.elasticsearch.xpack.notification.email.DataAttachment;
import org.elasticsearch.xpack.notification.email.Email;
import org.elasticsearch.xpack.notification.email.EmailService;
import org.elasticsearch.xpack.notification.email.HtmlSanitizer;
import org.elasticsearch.xpack.notification.email.attachment.EmailAttachmentParser;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.watcher.watch.Payload;
import java.util.HashMap;
import java.util.Map;
@ -32,7 +32,7 @@ public class ExecutableEmailAction extends ExecutableAction<EmailAction> {
final HtmlSanitizer htmlSanitizer;
private final Map<String, EmailAttachmentParser> emailAttachmentParsers;
public ExecutableEmailAction(EmailAction action, ESLogger logger, EmailService emailService, TextTemplateEngine templateEngine,
public ExecutableEmailAction(EmailAction action, Logger logger, EmailService emailService, TextTemplateEngine templateEngine,
HtmlSanitizer htmlSanitizer, Map<String, EmailAttachmentParser> emailAttachmentParsers) {
super(action, logger);
this.emailService = emailService;

View File

@ -5,14 +5,14 @@
*/
package org.elasticsearch.xpack.watcher.actions.hipchat;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.notification.hipchat.HipChatAccount;
import org.elasticsearch.xpack.notification.hipchat.HipChatMessage;
import org.elasticsearch.xpack.notification.hipchat.HipChatService;
import org.elasticsearch.xpack.notification.hipchat.SentMessages;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.watcher.watch.Payload;
@ -27,7 +27,7 @@ public class ExecutableHipChatAction extends ExecutableAction<HipChatAction> {
private final TextTemplateEngine templateEngine;
private final HipChatService hipchatService;
public ExecutableHipChatAction(HipChatAction action, ESLogger logger, HipChatService hipchatService,
public ExecutableHipChatAction(HipChatAction action, Logger logger, HipChatService hipchatService,
TextTemplateEngine templateEngine) {
super(action, logger);
this.hipchatService = hipchatService;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.actions.index;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
@ -12,7 +13,6 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
@ -38,7 +38,7 @@ public class ExecutableIndexAction extends ExecutableAction<IndexAction> {
private final WatcherClientProxy client;
private final TimeValue timeout;
public ExecutableIndexAction(IndexAction action, ESLogger logger, WatcherClientProxy client, @Nullable TimeValue defaultTimeout) {
public ExecutableIndexAction(IndexAction action, Logger logger, WatcherClientProxy client, @Nullable TimeValue defaultTimeout) {
super(action, logger);
this.client = client;
this.timeout = action.timeout != null ? action.timeout : defaultTimeout;

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.actions.logging;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
@ -22,23 +22,23 @@ import java.util.Map;
*/
public class ExecutableLoggingAction extends ExecutableAction<LoggingAction> {
private final ESLogger textLogger;
private final Logger textLogger;
private final TextTemplateEngine templateEngine;
ExecutableLoggingAction(LoggingAction action, ESLogger logger, Settings settings, TextTemplateEngine templateEngine) {
ExecutableLoggingAction(LoggingAction action, Logger logger, Settings settings, TextTemplateEngine templateEngine) {
super(action, logger);
this.textLogger = action.category != null ? Loggers.getLogger(action.category, settings) : logger;
this.templateEngine = templateEngine;
}
// for tests
ExecutableLoggingAction(LoggingAction action, ESLogger logger, ESLogger textLogger, TextTemplateEngine templateEngine) {
ExecutableLoggingAction(LoggingAction action, Logger logger, Logger textLogger, TextTemplateEngine templateEngine) {
super(action, logger);
this.textLogger = textLogger;
this.templateEngine = templateEngine;
}
ESLogger textLogger() {
Logger textLogger() {
return textLogger;
}

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.watcher.actions.logging;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -21,40 +21,40 @@ public enum LoggingLevel implements ToXContent {
ERROR() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
void log(Logger logger, String text) {
logger.error(text);
}
},
WARN() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
void log(Logger logger, String text) {
logger.warn(text);
}
},
INFO() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
void log(Logger logger, String text) {
logger.info(text);
}
},
DEBUG() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
void log(Logger logger, String text) {
logger.debug(text);
}
},
TRACE() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
void log(Logger logger, String text) {
logger.trace(text);
}
};
abstract void log(ESLogger logger, String text);
abstract void log(Logger logger, String text);
@Override

View File

@ -5,14 +5,14 @@
*/
package org.elasticsearch.xpack.watcher.actions.pagerduty;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.notification.pagerduty.IncidentEvent;
import org.elasticsearch.xpack.notification.pagerduty.PagerDutyAccount;
import org.elasticsearch.xpack.notification.pagerduty.PagerDutyService;
import org.elasticsearch.xpack.notification.pagerduty.SentEvent;
import org.elasticsearch.xpack.notification.pagerduty.IncidentEvent;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.watcher.watch.Payload;
@ -27,7 +27,7 @@ public class ExecutablePagerDutyAction extends ExecutableAction<PagerDutyAction>
private final TextTemplateEngine templateEngine;
private final PagerDutyService pagerDutyService;
public ExecutablePagerDutyAction(PagerDutyAction action, ESLogger logger, PagerDutyService pagerDutyService,
public ExecutablePagerDutyAction(PagerDutyAction action, Logger logger, PagerDutyService pagerDutyService,
TextTemplateEngine templateEngine) {
super(action, logger);
this.pagerDutyService = pagerDutyService;

View File

@ -5,14 +5,14 @@
*/
package org.elasticsearch.xpack.watcher.actions.slack;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.notification.slack.SentMessages;
import org.elasticsearch.xpack.notification.slack.SlackAccount;
import org.elasticsearch.xpack.notification.slack.SlackService;
import org.elasticsearch.xpack.notification.slack.message.SlackMessage;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.watcher.watch.Payload;
@ -27,7 +27,7 @@ public class ExecutableSlackAction extends ExecutableAction<SlackAction> {
private final TextTemplateEngine templateEngine;
private final SlackService slackService;
public ExecutableSlackAction(SlackAction action, ESLogger logger, SlackService slackService, TextTemplateEngine templateEngine) {
public ExecutableSlackAction(SlackAction action, Logger logger, SlackService slackService, TextTemplateEngine templateEngine) {
super(action, logger);
this.slackService = slackService;
this.templateEngine = templateEngine;

View File

@ -5,15 +5,15 @@
*/
package org.elasticsearch.xpack.watcher.actions.webhook;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.common.http.HttpClient;
import org.elasticsearch.xpack.common.http.HttpRequest;
import org.elasticsearch.xpack.common.http.HttpResponse;
import org.elasticsearch.xpack.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.actions.Action;
import org.elasticsearch.xpack.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.common.http.HttpClient;
import org.elasticsearch.xpack.common.http.HttpRequest;
import org.elasticsearch.xpack.common.http.HttpResponse;
import org.elasticsearch.xpack.watcher.watch.Payload;
import java.util.Map;
@ -25,7 +25,7 @@ public class ExecutableWebhookAction extends ExecutableAction<WebhookAction> {
private final HttpClient httpClient;
private final TextTemplateEngine templateEngine;
public ExecutableWebhookAction(WebhookAction action, ESLogger logger, HttpClient httpClient, TextTemplateEngine templateEngine) {
public ExecutableWebhookAction(WebhookAction action, Logger logger, HttpClient httpClient, TextTemplateEngine templateEngine) {
super(action, logger);
this.httpClient = httpClient;
this.templateEngine = templateEngine;

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.condition;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@ -15,9 +15,9 @@ import java.io.IOException;
*/
public abstract class ConditionFactory<C extends Condition, R extends Condition.Result, E extends ExecutableCondition<C, R>> {
protected final ESLogger conditionLogger;
protected final Logger conditionLogger;
public ConditionFactory(ESLogger conditionLogger) {
public ConditionFactory(Logger conditionLogger) {
this.conditionLogger = conditionLogger;
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.condition;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
@ -18,9 +18,9 @@ import java.io.IOException;
public abstract class ExecutableCondition<C extends Condition, R extends Condition.Result> implements ToXContent {
protected final C condition;
protected final ESLogger logger;
protected final Logger logger;
protected ExecutableCondition(C condition, ESLogger logger) {
protected ExecutableCondition(C condition, Logger logger) {
this.condition = condition;
this.logger = logger;
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.condition.always;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.watcher.condition.ExecutableCondition;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
@ -16,7 +16,7 @@ import java.io.IOException;
*/
public class ExecutableAlwaysCondition extends ExecutableCondition<AlwaysCondition, AlwaysCondition.Result> {
public ExecutableAlwaysCondition(ESLogger logger) {
public ExecutableAlwaysCondition(Logger logger) {
super(AlwaysCondition.INSTANCE, logger);
}

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.watcher.condition.compare;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.condition.Condition;
import org.elasticsearch.xpack.watcher.condition.ExecutableCondition;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.support.Variables;
import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -28,7 +28,7 @@ public abstract class AbstractExecutableCompareCondition<C extends Condition, R
private final Clock clock;
public AbstractExecutableCompareCondition(C condition, ESLogger logger, Clock clock) {
public AbstractExecutableCompareCondition(C condition, Logger logger, Clock clock) {
super(condition, logger);
this.clock = clock;
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.condition.compare;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath;
@ -16,7 +16,7 @@ import java.util.Map;
*
*/
public class ExecutableCompareCondition extends AbstractExecutableCompareCondition<CompareCondition, CompareCondition.Result> {
public ExecutableCompareCondition(CompareCondition condition, ESLogger logger, Clock clock) {
public ExecutableCompareCondition(CompareCondition condition, Logger logger, Clock clock) {
super(condition, logger, clock);
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.condition.compare.array;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.support.clock.Clock;
import org.elasticsearch.xpack.watcher.condition.compare.AbstractExecutableCompareCondition;
import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath;
@ -18,7 +18,7 @@ import java.util.Map;
public class ExecutableArrayCompareCondition extends AbstractExecutableCompareCondition<ArrayCompareCondition,
ArrayCompareCondition.Result> {
public ExecutableArrayCompareCondition(ArrayCompareCondition condition, ESLogger logger, Clock clock) {
public ExecutableArrayCompareCondition(ArrayCompareCondition condition, Logger logger, Clock clock) {
super(condition, logger, clock);
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.condition.never;
import org.elasticsearch.common.logging.ESLogger;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.watcher.condition.ExecutableCondition;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
@ -16,7 +16,7 @@ import java.io.IOException;
*/
public class ExecutableNeverCondition extends ExecutableCondition<NeverCondition, NeverCondition.Result> {
public ExecutableNeverCondition(ESLogger logger) {
public ExecutableNeverCondition(Logger logger) {
super(NeverCondition.INSTANCE, logger);
}

Some files were not shown because too many files have changed in this diff.