Avoid allocating log parameterized messages
This commit modifies the call sites that allocate a parameterized message to use a supplier so that allocations are avoided unless the log level is fine enough to emit the corresponding log message.

Original commit: elastic/x-pack-elasticsearch@955ab89b8b
parent 702d55f0be
commit 0a285a9a0c
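Background on the pattern, as a minimal standalone sketch (the LogManager-based logger setup and the class/method names here are illustrative assumptions, not taken from the diff): Log4j 2 evaluates a plain ParameterizedMessage argument eagerly, so the message object is allocated even when the log level is disabled, whereas the Supplier overloads defer that work until the logger has confirmed the level is enabled. The explicit (Supplier<?>) cast used throughout the diff pins the lambda to the Supplier-based overload of error/warn/debug/trace.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

public class SupplierLoggingSketch {
    private static final Logger logger = LogManager.getLogger(SupplierLoggingSketch.class);

    void onFailure(String source, Exception e) {
        // Before: the ParameterizedMessage is allocated on every call, even if ERROR is disabled.
        logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);

        // After: the lambda runs, and the message is allocated, only when ERROR is enabled.
        logger.error((Supplier<?>) () -> new ParameterizedMessage("unexpected failure during [{}]", source), e);
    }
}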
@@ -6,6 +6,7 @@
 package org.elasticsearch.license;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
@@ -301,7 +302,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
 
 @Override
 public void onFailure(String source, @Nullable Exception e) {
-logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unexpected failure during [{}]", source), e);
 }
 
 });

@@ -8,6 +8,7 @@ package org.elasticsearch.license;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.license.License.OperationMode;
 import org.elasticsearch.watcher.FileChangesListener;
 import org.elasticsearch.watcher.FileWatcher;
@@ -97,7 +98,7 @@ public final class OperationModeFileWatcher implements FileChangesListener {
 content = Files.readAllBytes(licenseModePath);
 } catch (IOException e) {
 logger.error(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "couldn't read operation mode from [{}]",
 licenseModePath.toAbsolutePath().toString()),
 e);
@@ -108,7 +109,11 @@ public final class OperationModeFileWatcher implements FileChangesListener {
 currentOperationMode = OperationMode.resolve(operationMode);
 } catch (IllegalArgumentException e) {
 logger.error(
-new ParameterizedMessage("invalid operation mode in [{}]", licenseModePath.toAbsolutePath().toString()), e);
+(Supplier<?>)
+() -> new ParameterizedMessage(
+"invalid operation mode in [{}]",
+licenseModePath.toAbsolutePath().toString()),
+e);
 return;
 }
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.monitoring.agent;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.lease.Releasable;
@@ -158,7 +159,7 @@ public class AgentService extends AbstractLifecycleComponent {
 try {
 exporter.close();
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e);
 }
 }
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.monitoring.agent.collector;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -81,7 +82,7 @@ public abstract class AbstractCollector extends AbstractLifecycleComponent imple
 } catch (ElasticsearchTimeoutException e) {
 logger.error("collector [{}] timed out when collecting data", name());
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("collector [{}] - failed collecting data", name()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("collector [{}] - failed collecting data", name()), e);
 }
 return null;
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.monitoring.agent.collector.cluster;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
@@ -67,7 +68,9 @@ public class ClusterStatsCollector extends AbstractCollector {
 clusterStats = client.admin().cluster().prepareClusterStats().get(monitoringSettings.clusterStatsTimeout());
 } catch (ElasticsearchSecurityException e) {
 if (LicenseUtils.isLicenseExpiredException(e)) {
-logger.trace(new ParameterizedMessage("collector [{}] - unable to collect data because of expired license", name()), e);
+logger.trace(
+(Supplier<?>) () -> new ParameterizedMessage(
+"collector [{}] - unable to collect data because of expired license", name()), e);
 } else {
 throw e;
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.monitoring.agent.exporter;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.component.Lifecycle;
@@ -87,7 +88,7 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
 try {
 exporter.close();
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to close exporter [{}]", exporter.name()), e);
 }
 }
 }
@@ -108,7 +109,8 @@ public class Exporters extends AbstractLifecycleComponent implements Iterable<Ex
 bulks.add(bulk);
 }
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("exporter [{}] failed to open exporting bulk", exporter.name()), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage("exporter [{}] failed to open exporting bulk", exporter.name()), e);
 }
 }
 return bulks.isEmpty() ? null : new ExportBulk.Compound(bulks);

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.monitoring.agent.exporter.http;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
@@ -337,7 +338,7 @@ public class HttpExporter extends Exporter {
 doc.getClass().getName(), doc.getMonitoringId(), doc.getMonitoringVersion());
 }
 } catch (Exception e) {
-logger.warn(new ParameterizedMessage("failed to render document [{}], skipping it", doc), e);
+logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to render document [{}], skipping it", doc), e);
 }
 }
 
@@ -400,7 +401,9 @@ public class HttpExporter extends Exporter {
 continue;
 }
 } catch (ElasticsearchException e) {
-logger.error(new ParameterizedMessage("exception when checking remote cluster version on host [{}]", host), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"exception when checking remote cluster version on host [{}]", host), e);
 continue;
 }
 }
@@ -498,9 +501,9 @@ public class HttpExporter extends Exporter {
 
 return conn;
 } catch (URISyntaxException e) {
-logger.error(new ParameterizedMessage("error parsing host [{}]", host), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("error parsing host [{}]", host), e);
 } catch (IOException e) {
-logger.error(new ParameterizedMessage("error connecting to [{}]", host), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("error connecting to [{}]", host), e);
 }
 return null;
 }
@@ -569,7 +572,9 @@ public class HttpExporter extends Exporter {
 return true;
 }
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("failed to verify the monitoring pipeline [{}] on [{}]", EXPORT_PIPELINE_NAME, host), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to verify the monitoring pipeline [{}] on [{}]", EXPORT_PIPELINE_NAME, host), e);
 return false;
 } finally {
 if (connection != null) {
@@ -605,7 +610,9 @@
 logger.info("monitoring pipeline [{}] set", EXPORT_PIPELINE_NAME);
 return true;
 } catch (IOException e) {
-logger.error(new ParameterizedMessage("failed to update monitoring pipeline [{}] on host [{}]", EXPORT_PIPELINE_NAME, host), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to update monitoring pipeline [{}] on host [{}]", EXPORT_PIPELINE_NAME, host), e);
 return false;
 } finally {
 if (connection != null) {
@@ -659,7 +666,9 @@
 return true;
 }
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("failed to verify the monitoring template [{}] on [{}]", templateName, host), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to verify the monitoring template [{}] on [{}]", templateName, host), e);
 return false;
 } finally {
 if (connection != null) {
@@ -693,7 +702,9 @@
 logger.info("monitoring template [{}] updated ", template);
 return true;
 } catch (IOException e) {
-logger.error(new ParameterizedMessage("failed to update monitoring template [{}] on host [{}]", template, host), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to update monitoring template [{}] on host [{}]", template, host), e);
 return false;
 } finally {
 if (connection != null) {

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.monitoring.agent.exporter.local;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
@@ -386,7 +387,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
 
 @Override
 public void onFailure(Exception e) {
-logger.error(new ParameterizedMessage("failed to set monitoring index {} [{}]", type, name), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to set monitoring index {} [{}]", type, name), e);
 }
 }
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
@@ -147,7 +148,7 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
 @Override
 public void onFailure(Exception e) {
 updateMappingPending.set(false);
-logger.warn(new ParameterizedMessage("failed to update mapping for [{}] on security index", type), e);
+logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to update mapping for [{}] on security index", type), e);
 }
 });
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.action.role;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
@@ -50,7 +51,7 @@ public class TransportDeleteRoleAction extends HandledTransportAction<DeleteRole
 }
 });
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("failed to delete role [{}]", request.name()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to delete role [{}]", request.name()), e);
 listener.onFailure(e);
 }
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.action.role;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
@@ -80,7 +81,7 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
 
 @Override
 public void onFailure(Exception t) {
-logger.error(new ParameterizedMessage("failed to retrieve role [{}]", rolename), t);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve role [{}]", rolename), t);
 listener.onFailure(t);
 }
 });
@@ -98,7 +99,9 @@ public class TransportGetRolesAction extends HandledTransportAction<GetRolesRequ
 
 @Override
 public void onFailure(Exception t) {
-logger.error(new ParameterizedMessage("failed to retrieve role [{}]", arrayToDelimitedString(request.names(), ",")), t);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to retrieve role [{}]", arrayToDelimitedString(request.names(), ",")), t);
 listener.onFailure(t);
 }
 });

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.action.user;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
@@ -81,7 +82,7 @@ public class TransportGetUsersAction extends HandledTransportAction<GetUsersRequ
 
 @Override
 public void onFailure(Exception e) {
-logger.error(new ParameterizedMessage("failed to retrieve user [{}]", username), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", username), e);
 listener.onFailure(e);
 }
 });
@@ -98,7 +99,8 @@ public class TransportGetUsersAction extends HandledTransportAction<GetUsersRequ
 @Override
 public void onFailure(Exception e) {
 logger.error(
-new ParameterizedMessage("failed to retrieve user [{}]", arrayToDelimitedString(request.usernames(), ",")), e);
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to retrieve user [{}]", arrayToDelimitedString(request.usernames(), ",")), e);
 listener.onFailure(e);
 }
 });

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.action.user;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
@@ -61,7 +62,7 @@ public class TransportPutUserAction extends HandledTransportAction<PutUserReques
 
 @Override
 public void onFailure(Exception e) {
-logger.error(new ParameterizedMessage("failed to put user [{}]", request.username()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put user [{}]", request.username()), e);
 listener.onFailure(e);
 }
 });

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.audit.index;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
@@ -190,7 +191,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
 events = parse(includedEvents, excludedEvents);
 } catch (IllegalArgumentException e) {
 logger.warn(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "invalid event type specified, using default for audit index output. include events [{}], exclude events [{}]",
 includedEvents,
 excludedEvents),
@@ -847,7 +848,9 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
 
 @Override
 public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
-logger.error(new ParameterizedMessage("failed to bulk index audit events: [{}]", failure.getMessage()), failure);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to bulk index audit events: [{}]", failure.getMessage()), failure);
 }
 }).setBulkActions(bulkSize)
 .setFlushInterval(interval)
@@ -872,7 +875,8 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
 threadPool.generic().execute(new AbstractRunnable() {
 @Override
 public void onFailure(Exception e) {
-logger.error(new ParameterizedMessage("failed to update security audit index template [{}]", INDEX_TEMPLATE_NAME), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage(
+"failed to update security audit index template [{}]", INDEX_TEMPLATE_NAME), e);
 }
 
 @Override

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.authc;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
@@ -189,7 +190,7 @@ public class AuthenticationService extends AbstractComponent {
 }
 } catch (Exception e) {
 if (logger.isDebugEnabled()) {
-logger.debug(new ParameterizedMessage("failed to extract token from request: [{}]", request), e);
+logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to extract token from request: [{}]", request), e);
 } else {
 logger.warn("failed to extract token from request: [{}]: {}", request, e.getMessage());
 }
@@ -229,7 +230,9 @@ public class AuthenticationService extends AbstractComponent {
 }
 }
 } catch (Exception e) {
-logger.debug(new ParameterizedMessage("authentication failed for principal [{}], [{}] ", token.principal(), request), e);
+logger.debug(
+(Supplier<?>) () -> new ParameterizedMessage(
+"authentication failed for principal [{}], [{}] ", token.principal(), request), e);
 throw request.exceptionProcessingRequest(e, token);
 } finally {
 token.clearCredentials();
@@ -284,7 +287,7 @@ public class AuthenticationService extends AbstractComponent {
 user = new User(user.principal(), user.roles(), new User(runAsUsername, Strings.EMPTY_ARRAY));
 } catch (Exception e) {
 logger.debug(
-new ParameterizedMessage("run as failed for principal [{}], [{}], run as username [{}]",
+(Supplier<?>) () -> new ParameterizedMessage("run as failed for principal [{}], [{}], run as username [{}]",
 token.principal(),
 request,
 runAsUsername),

@@ -15,6 +15,7 @@ import com.unboundid.ldap.sdk.SearchResultEntry;
 import com.unboundid.ldap.sdk.SearchScope;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope;
@@ -57,7 +58,7 @@ public class ActiveDirectoryGroupsResolver implements GroupsResolver {
 try {
 results = search(connection, searchRequest, logger);
 } catch (LDAPException e) {
-logger.error(new ParameterizedMessage("failed to fetch AD groups for DN [{}]", userDn), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to fetch AD groups for DN [{}]", userDn), e);
 return Collections.emptyList();
 }
 
@@ -93,7 +94,7 @@ public class ActiveDirectoryGroupsResolver implements GroupsResolver {
 }
 return Filter.createORFilter(orFilters);
 } catch (LDAPException e) {
-logger.error(new ParameterizedMessage("failed to fetch AD groups for DN [{}]", userDn), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to fetch AD groups for DN [{}]", userDn), e);
 return null;
 }
 }

@@ -10,6 +10,7 @@ import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.hppc.ObjectLongMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
@@ -164,7 +165,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 // we call the response with a null user
 listener.onResponse(null);
 } else {
-logger.debug(new ParameterizedMessage("failed to retrieve user [{}]", username), t);
+logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", username), t);
 listener.onFailure(t);
 }
 }
@@ -243,7 +244,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 }
 });
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("unable to retrieve users {}", Arrays.toString(usernames)), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to retrieve users {}", Arrays.toString(usernames)), e);
 listener.onFailure(e);
 }
 }
@@ -260,9 +261,11 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 @Override
 public void onFailure(Exception t) {
 if (t instanceof IndexNotFoundException) {
-logger.trace(new ParameterizedMessage("failed to retrieve user [{}] since security index does not exist", username), t);
+logger.trace(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to retrieve user [{}] since security index does not exist", username), t);
 } else {
-logger.error(new ParameterizedMessage("failed to retrieve user [{}]", username), t);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", username), t);
 }
 }
 }, latch));
@@ -288,9 +291,10 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 public void onFailure(Exception t) {
 if (t instanceof IndexNotFoundException) {
 logger.trace(
-new ParameterizedMessage("could not retrieve user [{}] because security index does not exist", user), t);
+(Supplier<?>) () -> new ParameterizedMessage(
+"could not retrieve user [{}] because security index does not exist", user), t);
 } else {
-logger.error(new ParameterizedMessage("failed to retrieve user [{}]", user), t);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve user [{}]", user), t);
 }
 // We don't invoke the onFailure listener here, instead
 // we call the response with a null user
@@ -301,7 +305,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 logger.trace("could not retrieve user [{}] because security index does not exist", user);
 listener.onResponse(null);
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("unable to retrieve user [{}]", user), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to retrieve user [{}]", user), e);
 listener.onFailure(e);
 }
 }
@@ -347,7 +351,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 if (docType.equals(RESERVED_USER_DOC_TYPE)) {
 createReservedUser(username, request.passwordHash(), request.getRefreshPolicy(), listener);
 } else {
-logger.debug(new ParameterizedMessage("failed to change password for user [{}]", request.username()), cause);
+logger.debug(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to change password for user [{}]", request.username()), cause);
 ValidationException validationException = new ValidationException();
 validationException.addValidationError("user must exist in order to change password");
 listener.onFailure(validationException);
@@ -386,7 +392,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 indexUser(request, listener);
 }
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("unable to put user [{}]", request.username()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to put user [{}]", request.username()), e);
 listener.onFailure(e);
 }
 }
@@ -423,7 +429,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 // if the index doesn't exist we can never update a user
 // if the document doesn't exist, then this update is not valid
 logger.debug(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "failed to update user document with username [{}]",
 putUserRequest.username()),
 cause);
@@ -620,12 +626,14 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 public void onFailure(Exception e) {
 if (e instanceof IndexNotFoundException) {
 logger.trace(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "could not retrieve built in user [{}] password since security index does not exist",
 username),
 e);
 } else {
-logger.error(new ParameterizedMessage("failed to retrieve built in user [{}] password", username), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to retrieve built in user [{}] password", username), e);
 failure.set(e);
 }
 }
@@ -659,7 +667,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 @Override
 public void onFailure(Exception t) {
 // Not really much to do here except for warn about it...
-logger.warn(new ParameterizedMessage("failed to clear scroll [{}]", scrollId), t);
+logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to clear scroll [{}]", scrollId), t);
 }
 });
 }
@@ -676,7 +684,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 
 @Override
 public void onFailure(Exception e) {
-logger.error(new ParameterizedMessage("unable to clear realm cache for user [{}]", username), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to clear realm cache for user [{}]", username), e);
 ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + username
 + "] failed. please clear the realm cache manually", e);
 listener.onFailure(exception);
@@ -726,7 +734,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
 Map<String, Object> metadata = (Map<String, Object>) sourceMap.get(User.Fields.METADATA.getPreferredName());
 return new UserAndPassword(new User(username, roles, fullName, email, metadata), password.toCharArray());
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("error in the format of data for user [{}]", username), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("error in the format of data for user [{}]", username), e);
 return null;
 }
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.authc.esnative;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.xpack.security.authc.RealmConfig;
@@ -131,7 +132,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
 }
 return passwordHash;
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("failed to retrieve password hash for reserved user [{}]", username), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage("failed to retrieve password hash for reserved user [{}]", username), e);
 return null;
 }
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.file;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.inject.internal.Nullable;
 import org.elasticsearch.env.Environment;
@@ -103,7 +104,8 @@ public class FileUserPasswdStore {
 return parseFile(path, logger);
 } catch (Exception e) {
 logger.error(
-new ParameterizedMessage("failed to parse users file [{}]. skipping/removing all users...", path.toAbsolutePath()), e);
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to parse users file [{}]. skipping/removing all users...", path.toAbsolutePath()), e);
 return emptyMap();
 }
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.file;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.internal.Nullable;
@@ -98,7 +99,7 @@ public class FileUserRolesStore {
 return parseFile(path, logger);
 } catch (Exception e) {
 logger.error(
-new ParameterizedMessage("failed to parse users_roles file [{}]. skipping/removing all entries...",
+(Supplier<?>) () -> new ParameterizedMessage("failed to parse users_roles file [{}]. skipping/removing all entries...",
 path.toAbsolutePath()),
 e);
 return emptyMap();

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security.authc.ldap;
 import com.unboundid.ldap.sdk.LDAPConnection;
 import com.unboundid.ldap.sdk.LDAPException;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.xpack.security.authc.RealmConfig;
 import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession;
@@ -65,7 +66,8 @@ public class LdapSessionFactory extends SessionFactory {
 } catch (LDAPException e) {
 // we catch the ldapException here since we expect it can happen and we shouldn't be logging this all the time otherwise
 // it is just noise
-logger.debug(new ParameterizedMessage("failed LDAP authentication with user template [{}] and DN [{}]", template, dn), e);
+logger.debug((Supplier<?>) () -> new ParameterizedMessage(
+"failed LDAP authentication with user template [{}] and DN [{}]", template, dn), e);
 if (lastException == null) {
 lastException = e;
 } else {

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.ldap.support;
 
 import com.unboundid.ldap.sdk.LDAPException;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.xpack.security.authc.RealmConfig;
 import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
 import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;
@@ -76,7 +77,7 @@ public abstract class AbstractLdapRealm extends CachingUsernamePasswordRealm {
 
 private void logException(String action, Exception e, String principal) {
 if (logger.isDebugEnabled()) {
-logger.debug(new ParameterizedMessage("{} failed for user [{}]", action, principal), e);
+logger.debug((Supplier<?>) () -> new ParameterizedMessage("{} failed for user [{}]", action, principal), e);
 } else {
 String causeMessage = (e.getCause() == null) ? null : e.getCause().getMessage();
 if (causeMessage == null) {

@@ -16,6 +16,7 @@ import com.unboundid.ldap.sdk.SearchResult;
 import com.unboundid.ldap.sdk.SearchResultEntry;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 
 import javax.naming.ldap.Rdn;
 import java.text.MessageFormat;
@@ -57,7 +58,7 @@ public final class LdapUtils {
 if (e.getResultCode().equals(ResultCode.REFERRAL) && e.getSearchResult() != null) {
 if (logger.isDebugEnabled()) {
 logger.debug(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "a referral could not be followed for request [{}] so some results may not have been retrieved",
 searchRequest),
 e);
@@ -82,7 +83,7 @@ public final class LdapUtils {
 if (e.getResultCode().equals(ResultCode.REFERRAL) && e.getSearchResult() != null && e.getSearchResult().getEntryCount() > 0) {
 if (logger.isDebugEnabled()) {
 logger.debug(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "a referral could not be followed for request [{}] so some results may not have been retrieved",
 searchRequest),
 e);

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.pki;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
@@ -138,7 +139,9 @@ public class PkiRealm extends Realm {
 break;
 } catch (CertificateException e) {
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("failed certificate validation for principal [{}]", token.principal()), e);
+logger.trace(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed certificate validation for principal [{}]", token.principal()), e);
 } else if (logger.isDebugEnabled()) {
 logger.debug("failed certificate validation for principal [{}]", token.principal());
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.authc.support;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
@@ -130,7 +131,9 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
 }
 
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("realm [{}] could not authenticate [{}]", type(), token.principal()), ee);
+logger.trace(
+(Supplier<?>) () -> new ParameterizedMessage(
+"realm [{}] could not authenticate [{}]", type(), token.principal()), ee);
 } else if (logger.isDebugEnabled()) {
 logger.debug("realm [{}] could not authenticate [{}]", type(), token.principal());
 }
@@ -160,7 +163,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
 return userWithHash.user;
 } catch (ExecutionException ee) {
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("realm [{}] could not lookup [{}]", name(), username), ee);
+logger.trace((Supplier<?>) () -> new ParameterizedMessage("realm [{}] could not lookup [{}]", name(), username), ee);
 } else if (logger.isDebugEnabled()) {
 logger.debug("realm [{}] could not authenticate [{}]", name(), username);
 }

@@ -9,6 +9,7 @@ import com.unboundid.ldap.sdk.DN;
 import com.unboundid.ldap.sdk.LDAPException;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Settings;
@@ -95,9 +96,8 @@ public class DnRoleMapper {
 return parseFile(path, logger, realmType, realmName);
 } catch (Exception e) {
 logger.error(
-new ParameterizedMessage("failed to parse role mappings file [{}]. skipping/removing all mappings...",
-path.toAbsolutePath()),
-e);
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to parse role mappings file [{}]. skipping/removing all mappings...", path.toAbsolutePath()), e);
 return emptyMap();
 }
 }
@@ -129,7 +129,7 @@ public class DnRoleMapper {
 dnRoles.add(role);
 } catch (LDAPException e) {
 logger.error(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "invalid DN [{}] found in [{}] role mappings [{}] for realm [{}/{}]. skipping... ",
 providedDn,
 realmType,

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authz.store;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
@@ -151,7 +152,11 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
 }
 
 } catch (IOException ioe) {
-logger.error(new ParameterizedMessage("failed to read roles file [{}]. skipping all roles...", path.toAbsolutePath()), ioe);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to read roles file [{}]. skipping all roles...",
+path.toAbsolutePath()),
+ioe);
 return emptyMap();
 }
 } else {
@@ -181,7 +186,11 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
 }
 }
 } catch (IOException ioe) {
-logger.error(new ParameterizedMessage("failed to read roles file [{}]. skipping all roles...", path.toAbsolutePath()), ioe);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"failed to read roles file [{}]. skipping all roles...",
+path.toAbsolutePath()),
+ioe);
 return emptyMap();
 }
 }
@@ -246,16 +255,26 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
 } catch (ElasticsearchParseException e) {
 assert roleName != null;
 if (logger.isDebugEnabled()) {
-logger.debug(new ParameterizedMessage("parsing exception for role [{}]", roleName), e);
+final String finalRoleName = roleName;
+logger.debug((Supplier<?>) () -> new ParameterizedMessage("parsing exception for role [{}]", finalRoleName), e);
 } else {
 logger.error(e.getMessage() + ". skipping role...");
 }
 } catch (IOException e) {
 if (roleName != null) {
+final String finalRoleName = roleName;
 logger.error(
-new ParameterizedMessage("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path), e);
+(Supplier<?>) () -> new ParameterizedMessage(
+"invalid role definition [{}] in roles file [{}]. skipping role...",
+finalRoleName,
+path),
+e);
 } else {
-logger.error(new ParameterizedMessage("invalid role definition in roles file [{}]. skipping role...", path), e);
+logger.error(
+(Supplier<?>) () -> new ParameterizedMessage(
+"invalid role definition in roles file [{}]. skipping role...",
+path),
+e);
 }
 }
 return null;
@@ -304,9 +323,8 @@ public class FileRolesStore extends AbstractLifecycleComponent implements RolesS
 logger.info("updated roles (roles file [{}] changed)", file.toAbsolutePath());
 } catch (Exception e) {
 logger.error(
-new ParameterizedMessage("could not reload roles file [{}]. Current roles remain unmodified",
-file.toAbsolutePath()),
-e);
+(Supplier<?>) () -> new ParameterizedMessage(
+"could not reload roles file [{}]. Current roles remain unmodified", file.toAbsolutePath()), e);
 return;
 }
 listener.onRefresh();

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.authz.store;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteResponse;
@@ -152,7 +153,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 poller.doRun();
 } catch (Exception e) {
 logger.warn(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "failed to perform initial poll of roles index [{}]. scheduling again in [{}]",
 SECURITY_INDEX_NAME,
 pollInterval),
@@ -248,7 +249,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 }
 });
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("unable to retrieve roles {}", Arrays.toString(names)), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to retrieve roles {}", Arrays.toString(names)), e);
 listener.onFailure(e);
 }
 }
@@ -314,12 +315,12 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 
 @Override
 public void onFailure(Exception e) {
-logger.error(new ParameterizedMessage("failed to put role [{}]", request.name()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put role [{}]", request.name()), e);
 listener.onFailure(e);
 }
 });
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("unable to put role [{}]", request.name()), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to put role [{}]", request.name()), e);
 listener.onFailure(e);
 }
 
@@ -428,12 +429,12 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 public void onFailure(Exception t) {
 if (t instanceof IndexNotFoundException) {
 logger.trace(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "failed to retrieve role [{}] since security index does not exist",
 roleId),
 t);
 } else {
-logger.error(new ParameterizedMessage("failed to retrieve role [{}]", roleId), t);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to retrieve role [{}]", roleId), t);
 }
 }
 }, latch));
@@ -458,7 +459,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 }
 });
 } catch (RuntimeException e) {
-logger.error(new ParameterizedMessage("could not get or load value from cache for role [{}]", roleId), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("could not get or load value from cache for role [{}]", roleId), e);
 }
 
 return roleAndVersion;
@@ -469,7 +470,9 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 GetRequest request = client.prepareGet(SecurityTemplateService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, role).request();
 client.get(request, listener);
 } catch (IndexNotFoundException e) {
-logger.trace(new ParameterizedMessage("unable to retrieve role [{}] since security index does not exist", role), e);
+logger.trace(
+(Supplier<?>) () -> new ParameterizedMessage(
+"unable to retrieve role [{}] since security index does not exist", role), e);
 listener.onResponse(new GetResponse(
 new GetResult(SecurityTemplateService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, role, -1, false, null, null)));
 } catch (Exception e) {
@@ -489,7 +492,8 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 @Override
 public void onFailure(Exception t) {
 // Not really much to do here except for warn about it...
-logger.warn(new ParameterizedMessage("failed to clear scroll [{}] after retrieving roles", scrollId), t);
+logger.warn(
+(Supplier<?>) () -> new ParameterizedMessage("failed to clear scroll [{}] after retrieving roles", scrollId), t);
 }
 });
 }
@@ -525,7 +529,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 
 @Override
 public void onFailure(Exception e) {
-logger.error(new ParameterizedMessage("unable to clear cache for role [{}]", role), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("unable to clear cache for role [{}]", role), e);
 ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + role
 + "] failed. please clear the role cache manually", e);
 listener.onFailure(exception);
@@ -565,7 +569,7 @@ public class NativeRolesStore extends AbstractComponent implements RolesStore, C
 try {
 return RoleDescriptor.parse(name, sourceBytes);
 } catch (Exception e) {
-logger.error(new ParameterizedMessage("error in the format of data for role [{}]", name), e);
+logger.error((Supplier<?>) () -> new ParameterizedMessage("error in the format of data for role [{}]", name), e);
 return null;
 }
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.rest;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.logging.Loggers;
@@ -107,7 +108,8 @@ public class SecurityRestFilter extends RestFilter {
 assert sslEngine.getNeedClientAuth() == false;
 assert sslEngine.getWantClientAuth();
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("SSL Peer did not present a certificate on channel [{}]", channel), e);
+logger.trace(
+(Supplier<?>) () -> new ParameterizedMessage("SSL Peer did not present a certificate on channel [{}]", channel), e);
 } else if (logger.isDebugEnabled()) {
 logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.transport;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.transport.DelegatingTransportChannel;
@@ -114,7 +115,9 @@ public interface ServerTransportFilter {
 assert sslEngine.getNeedClientAuth() == false;
 assert sslEngine.getWantClientAuth();
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("SSL Peer did not present a certificate on channel [{}]", channel), e);
+logger.trace(
+(Supplier<?>) () -> new ParameterizedMessage(
+"SSL Peer did not present a certificate on channel [{}]", channel), e);
 } else if (logger.isDebugEnabled()) {
 logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.transport.netty3;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.jboss.netty.channel.ChannelFuture;
 import org.jboss.netty.channel.ChannelFutureListener;
 import org.jboss.netty.channel.ChannelHandlerContext;
@@ -70,7 +71,9 @@ public class Netty3HandshakeWaitingHandler extends SimpleChannelHandler {
 } else {
 Throwable cause = handshakeFuture.getCause();
 if (logger.isDebugEnabled()) {
-logger.debug(new ParameterizedMessage("SSL/TLS handshake failed, closing channel: {}", cause.getMessage()), cause);
+logger.debug(
+(Supplier<?>) () -> new ParameterizedMessage(
+"SSL/TLS handshake failed, closing channel: {}", cause.getMessage()), cause);
 } else {
 logger.error("SSL/TLS handshake failed, closing channel: {}", cause.getMessage());
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.transport.netty3;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Setting;
@@ -75,7 +76,7 @@ public class SecurityNetty3HttpServerTransport extends Netty3HttpServerTransport
 if (isNotSslRecordException(t)) {
 if (logger.isTraceEnabled()) {
 logger.trace(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "received plaintext http traffic on a https channel, closing connection {}",
 ctx.getChannel()),
 t);
@@ -85,7 +86,7 @@ public class SecurityNetty3HttpServerTransport extends Netty3HttpServerTransport
 ctx.getChannel().close();
 } else if (isCloseDuringHandshakeException(t)) {
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("connection {} closed during handshake", ctx.getChannel()), t);
+logger.trace((Supplier<?>) () -> new ParameterizedMessage("connection {} closed during handshake", ctx.getChannel()), t);
 } else {
 logger.warn("connection {} closed during handshake", ctx.getChannel());
 }

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.security.transport.netty3;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.inject.internal.Nullable;
@@ -128,14 +129,15 @@ public class SecurityNetty3Transport extends Netty3Transport {
 if (isNotSslRecordException(e)) {
 if (logger.isTraceEnabled()) {
 logger.trace(
-new ParameterizedMessage("received plaintext traffic on a encrypted channel, closing connection {}", channel), e);
+(Supplier<?>) () -> new ParameterizedMessage(
+"received plaintext traffic on a encrypted channel, closing connection {}", channel), e);
 } else {
 logger.warn("received plaintext traffic on a encrypted channel, closing connection {}", channel);
 }
 disconnectFromNodeChannel(channel, e);
 } else if (isCloseDuringHandshakeException(e)) {
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("connection {} closed during handshake", channel), e);
+logger.trace((Supplier<?>) () -> new ParameterizedMessage("connection {} closed during handshake", channel), e);
 } else {
 logger.warn("connection {} closed during handshake", channel);
 }

@@ -10,6 +10,7 @@ import io.netty.channel.ChannelHandler;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.ssl.SslHandler;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Setting;
@@ -70,7 +71,7 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
 if (isNotSslRecordException(cause)) {
 if (logger.isTraceEnabled()) {
 logger.trace(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "received plaintext http traffic on a https channel, closing connection {}",
 ctx.channel()),
 cause);
@@ -80,7 +81,7 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
 ctx.channel().close();
 } else if (isCloseDuringHandshakeException(cause)) {
 if (logger.isTraceEnabled()) {
-logger.trace(new ParameterizedMessage("connection {} closed during handshake", ctx.channel()), cause);
+logger.trace((Supplier<?>) () -> new ParameterizedMessage("connection {} closed during handshake", ctx.channel()), cause);
 } else {
 logger.warn("connection {} closed during handshake", ctx.channel());
 }

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.notification.email.attachment;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
@@ -110,7 +111,7 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpReq
 }
 } catch (IOException e) {
 logger.error(
-new ParameterizedMessage(
+(Supplier<?>) () -> new ParameterizedMessage(
 "Error executing HTTP request: [host[{}], port[{}], method[{}], path[{}]: [{}]",
 httpRequest.host(),
 httpRequest.port(),

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.notification.email.support;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.transport.PortsRange;
import org.subethamail.smtp.TooMuchDataException;

@@ -111,7 +112,8 @@ public class EmailServer {
return true;
} catch (RuntimeException re) {
if (re.getCause() instanceof BindException) {
logger.warn(new ParameterizedMessage("port [{}] was already in use trying next port", port), re);
logger.warn(
(Supplier<?>) () -> new ParameterizedMessage("port [{}] was already in use trying next port", port), re);
return false;
} else {
throw re;

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;

@@ -202,7 +203,7 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste
@Override
public void onFailure(String source, Exception throwable) {
latch.countDown();
logger.warn(new ParameterizedMessage("couldn't update watcher metadata [{}]", source), throwable);
logger.warn((Supplier<?>) () -> new ParameterizedMessage("couldn't update watcher metadata [{}]", source), throwable);
}
});
try {

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.actions;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;

@@ -133,8 +134,8 @@ public class ActionWrapper implements ToXContent {
payload = transformResult.payload();
} catch (Exception e) {
action.logger().error(
new ParameterizedMessage("failed to execute action [{}/{}]. failed to transform payload.", ctx.watch().id(), id),
e);
(Supplier<?>) () -> new ParameterizedMessage(
"failed to execute action [{}/{}]. failed to transform payload.", ctx.watch().id(), id), e);
return new ActionWrapper.Result(id, conditionResult, null,
new Action.Result.Failure(action.type(), "Failed to transform payload. error: {}",
ExceptionsHelper.detailedMessage(e)));

@@ -144,7 +145,8 @@ public class ActionWrapper implements ToXContent {
Action.Result actionResult = action.execute(id, ctx, payload);
return new ActionWrapper.Result(id, conditionResult, transformResult, actionResult);
} catch (Exception e) {
action.logger().error(new ParameterizedMessage("failed to execute action [{}/{}]", ctx.watch().id(), id), e);
action.logger().error(
(Supplier<?>) () -> new ParameterizedMessage("failed to execute action [{}/{}]", ctx.watch().id(), id), e);
return new ActionWrapper.Result(id, new Action.Result.Failure(action.type(), ExceptionsHelper.detailedMessage(e)));
}
}

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.execution;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

@@ -36,7 +37,7 @@ public class AsyncTriggerListener implements TriggerEngine.Listener {
executionService.processEventsAsync(events);
} catch (Exception e) {
logger.error(
new ParameterizedMessage(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to process triggered events [{}]",
(Object) stream(events.spliterator(), false).toArray(size -> new TriggerEvent[size])),
e);

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.execution;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterState;

@@ -200,7 +201,7 @@ public class ExecutionService extends AbstractComponent {
try {
executeAsync(contexts.get(slot), triggeredWatch);
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute watch [{}]", triggeredWatch.id()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute watch [{}]", triggeredWatch.id()), e);
}
}
}

@@ -281,14 +282,14 @@
historyStore.put(record);
}
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to update watch record [{}]", ctx.id()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to update watch record [{}]", ctx.id()), e);
// TODO log watch record in logger, when saving in history store failed, otherwise the info is gone!
}
}
try {
triggeredWatchStore.delete(ctx.id());
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to delete triggered watch [{}]", ctx.id()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to delete triggered watch [{}]", ctx.id()), e);
}
currentExecutions.remove(ctx.watch().id());
if (logger.isTraceEnabled()) {

@@ -316,7 +317,7 @@
private void logWatchRecord(WatchExecutionContext ctx, Exception e) {
// failed watches stack traces are only logged in debug, otherwise they should be checked out in the history
if (logger.isDebugEnabled()) {
logger.debug(new ParameterizedMessage("failed to execute watch [{}]", ctx.id()), e);
logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to execute watch [{}]", ctx.id()), e);
} else {
logger.warn("Failed to execute watch [{}]", ctx.id());
}

@@ -462,7 +463,8 @@
try {
execute(ctx);
} catch (Exception e) {
logger.error(new ParameterizedMessage("could not execute watch [{}]/[{}]", ctx.watch().id(), ctx.id()), e);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage("could not execute watch [{}]/[{}]", ctx.watch().id(), ctx.id()), e);
}
}
}

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.execution;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

@@ -36,7 +37,7 @@ public class SyncTriggerListener implements TriggerEngine.Listener {
executionService.processEventsSync(events);
} catch (Exception e) {
logger.error(
new ParameterizedMessage(
(Supplier<?>) () -> new ParameterizedMessage(
"failed to process triggered events [{}]",
(Object) stream(events.spliterator(), false).toArray(size -> new TriggerEvent[size])),
e);

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.execution;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;

@@ -269,7 +270,8 @@ public class TriggeredWatchStore extends AbstractComponent {
logger.debug("loaded triggered watch [{}/{}/{}]", sh.index(), sh.type(), sh.id());
triggeredWatches.add(triggeredWatch);
} catch (Exception e) {
logger.error(new ParameterizedMessage("couldn't load triggered watch [{}], ignoring it...", id), e);
logger.error(
(Supplier<?>) () -> new ParameterizedMessage("couldn't load triggered watch [{}], ignoring it...", id), e);
}
}
response = client.searchScroll(response.getScrollId(), scrollTimeout);

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.input.chain;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.input.ExecutableInput;

@@ -43,7 +44,7 @@ public class ExecutableChainInput extends ExecutableInput<ChainInput,ChainInput.

return new ChainInput.Result(results, new Payload.Simple(payloads));
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute [{}] input for [{}]", TYPE, ctx.watch().id()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}] input for [{}]", TYPE, ctx.watch().id()), e);
return new ChainInput.Result(e);
}
}

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.input.http;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

@@ -48,7 +49,7 @@ public class ExecutableHttpInput extends ExecutableInput<HttpInput, HttpInput.Re
request = input.getRequest().render(templateEngine, model);
return doExecute(ctx, request);
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute [{}] input for [{}]", TYPE, ctx.watch()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}] input for [{}]", TYPE, ctx.watch()), e);
return new HttpInput.Result(request, e);
}
}

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.input.search;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Nullable;

@@ -59,7 +60,7 @@ public class ExecutableSearchInput extends ExecutableInput<SearchInput, SearchIn
request = new WatcherSearchTemplateRequest(input.getRequest(), renderedTemplate);
return doExecute(ctx, request);
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute [{}] input for [{}]", TYPE, ctx.watch()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}] input for [{}]", TYPE, ctx.watch()), e);
return new SearchInput.Result(request, e);
}
}

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.transform.chain;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;
import org.elasticsearch.xpack.watcher.transform.Transform;

@@ -47,7 +48,7 @@ public class ExecutableChainTransform extends ExecutableTransform<ChainTransform
try {
return doExecute(ctx, payload, results);
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute [{}] transform for [{}]", TYPE, ctx.id()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}] transform for [{}]", TYPE, ctx.id()), e);
return new ChainTransform.Result(e, results);
}
}

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.transform.script;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;

@@ -46,7 +47,7 @@ public class ExecutableScriptTransform extends ExecutableTransform<ScriptTransfo
try {
return doExecute(ctx, payload);
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute [{}] transform for [{}]", TYPE, ctx.id()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}] transform for [{}]", TYPE, ctx.id()), e);
return new ScriptTransform.Result(e);
}
}

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.transform.search;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Nullable;

@@ -52,7 +53,7 @@ public class ExecutableSearchTransform extends ExecutableTransform<SearchTransfo
SearchResponse resp = client.search(searchTemplateService.toSearchRequest(request), timeout);
return new SearchTransform.Result(request, new Payload.XContent(resp));
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute [{}] transform for [{}]", TYPE, ctx.id()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}] transform for [{}]", TYPE, ctx.id()), e);
return new SearchTransform.Result(request, e);
}
}

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.transport.actions.execute;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;

@@ -126,7 +127,7 @@ public class TransportExecuteWatchAction extends WatcherTransportAction<ExecuteW
ExecuteWatchResponse response = new ExecuteWatchResponse(record.id().value(), builder.bytes(), XContentType.JSON);
listener.onResponse(response);
} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to execute [{}]", request.getId()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to execute [{}]", request.getId()), e);
listener.onFailure(e);
}
}

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.transport.actions.get;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;

@@ -88,7 +89,7 @@ public class TransportGetWatchAction extends WatcherTransportAction<GetWatchRequ
}

} catch (Exception e) {
logger.error(new ParameterizedMessage("failed to get watch [{}]", request.getId()), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to get watch [{}]", request.getId()), e);
throw e;
}
}

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.watch;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;

@@ -64,7 +65,7 @@ public class WatchLockService extends AbstractComponent {
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
//todo figure out a better std exception for this
logger.error(new ParameterizedMessage("could not acquire lock for watch [{}]", name), ie);
logger.error((Supplier<?>) () -> new ParameterizedMessage("could not acquire lock for watch [{}]", name), ie);
return null;
}
return new Lock(name, watchLocks);

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.watch;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;

@@ -93,7 +94,7 @@ public class WatchStore extends AbstractComponent {
logger.debug("loaded [{}] watches from the watches index [{}]", count, INDEX);
started.set(true);
} catch (Exception e) {
logger.debug(new ParameterizedMessage("failed to load watches for watch index [{}]", INDEX), e);
logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to load watches for watch index [{}]", INDEX), e);
watches.clear();
throw e;
}

@@ -315,7 +316,7 @@
watches.put(id, watch);
count++;
} catch (Exception e) {
logger.error(new ParameterizedMessage("couldn't load watch [{}], ignoring it...", id), e);
logger.error((Supplier<?>) () -> new ParameterizedMessage("couldn't load watch [{}], ignoring it...", id), e);
}
}
response = client.searchScroll(response.getScrollId(), scrollTimeout);