Core: Replace deprecated Loggers calls with LogManager. (#34691)

Replace deprecated Loggers calls with LogManager.

Relates to #32174
Pratik Sanglikar 2018-10-29 12:52:30 -07:00 committed by Nik Everett
parent 6169e9345e
commit f1135ef0ce
93 changed files with 205 additions and 200 deletions
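For readers skimming the diff, the change applied across these files follows one mechanical pattern: logger lookups move from the deprecated Elasticsearch helper org.elasticsearch.common.logging.Loggers.getLogger(...) to Log4j's own org.apache.logging.log4j.LogManager.getLogger(...), and the Loggers import is dropped wherever nothing else still needs it. A minimal before/after sketch of that pattern (the class name ExampleComponent is hypothetical and only for illustration; it is not one of the files changed by this commit):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Hypothetical class, used only to illustrate the migration pattern.
public class ExampleComponent {

    // Before: the deprecated Elasticsearch helper.
    // private static final Logger logger = Loggers.getLogger(ExampleComponent.class);

    // After: look the logger up directly through Log4j's LogManager.
    private static final Logger logger = LogManager.getLogger(ExampleComponent.class);

    public void doWork() {
        logger.info("doing work");
    }
}

Where the logger is obtained inline rather than stored in a field (for example the LogManager.getLogger(getClass()) call in XPackLicenseState further down), the same one-for-one substitution applies.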

View File

@ -27,6 +27,7 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTokenFilterFactory {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(LegacyDelimitedPayloadTokenFilterFactory.class));
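The DeprecationLogger wrapper itself is untouched by this commit; only the Logger it wraps is now obtained from LogManager, as in the hunk above. A short sketch of the wrapping pattern repeated in the plugin files below (the holder class name is made up for illustration):

import org.apache.logging.log4j.LogManager;
import org.elasticsearch.common.logging.DeprecationLogger;

// Hypothetical holder class; shows the DeprecationLogger wrapping pattern used across these files.
public class DeprecationLoggingSketch {
    private static final DeprecationLogger DEPRECATION_LOGGER =
        new DeprecationLogger(LogManager.getLogger(DeprecationLoggingSketch.class));
}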

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.reindex;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
@ -26,7 +27,6 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.engine.Engine;
@ -298,7 +298,7 @@ public class CancelTests extends ReindexTestCase {
}
public static class BlockingOperationListener implements IndexingOperationListener {
private static final Logger log = Loggers.getLogger(CancelTests.class);
private static final Logger log = LogManager.getLogger(CancelTests.class);
@Override
public Engine.Index preIndex(ShardId shardId, Engine.Index index) {

View File

@ -20,6 +20,7 @@
package org.elasticsearch.transport.netty4;
import io.netty.util.internal.logging.AbstractInternalLogger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.SuppressLoggerChecks;

View File

@ -20,6 +20,7 @@
package org.elasticsearch.transport.netty4;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.ESNetty4IntegTestCase;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest;
import org.elasticsearch.common.logging.Loggers;
@ -37,14 +38,14 @@ public class ESLoggingHandlerIT extends ESNetty4IntegTestCase {
public void setUp() throws Exception {
super.setUp();
appender = new MockLogAppender();
Loggers.addAppender(Loggers.getLogger(ESLoggingHandler.class), appender);
Loggers.addAppender(Loggers.getLogger(TransportLogger.class), appender);
Loggers.addAppender(LogManager.getLogger(ESLoggingHandler.class), appender);
Loggers.addAppender(LogManager.getLogger(TransportLogger.class), appender);
appender.start();
}
public void tearDown() throws Exception {
Loggers.removeAppender(Loggers.getLogger(ESLoggingHandler.class), appender);
Loggers.removeAppender(Loggers.getLogger(TransportLogger.class), appender);
Loggers.removeAppender(LogManager.getLogger(ESLoggingHandler.class), appender);
Loggers.removeAppender(LogManager.getLogger(TransportLogger.class), appender);
appender.stop();
super.tearDown();
}
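In test fixtures like this one, only the logger lookup changes: MockLogAppender and the Elasticsearch-specific Loggers.addAppender / Loggers.removeAppender helpers stay as they were, since the deprecation covers the Loggers.getLogger lookup methods rather than the appender wiring. A rough sketch of the resulting setUp/tearDown shape, assuming the test-framework signatures shown in the hunk above (the class name LoggingWiringSketch is made up for illustration):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.MockLogAppender;

// Illustrative only: mirrors the appender wiring used by the integration tests above.
public class LoggingWiringSketch {

    private MockLogAppender appender;

    public void setUp() throws Exception {
        appender = new MockLogAppender();
        // The Logger instance now comes from LogManager; attaching the mock
        // appender still goes through the Elasticsearch Loggers helper.
        Logger target = LogManager.getLogger(LoggingWiringSketch.class);
        Loggers.addAppender(target, appender);
        appender.start();
    }

    public void tearDown() throws Exception {
        Logger target = LogManager.getLogger(LoggingWiringSketch.class);
        Loggers.removeAppender(target, appender);
        appender.stop();
    }
}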

View File

@ -19,11 +19,11 @@
package org.elasticsearch.plugin.discovery.azure.classic;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cloud.azure.classic.management.AzureComputeService;
import org.elasticsearch.cloud.azure.classic.management.AzureComputeServiceImpl;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@ -43,7 +43,7 @@ public class AzureDiscoveryPlugin extends Plugin implements DiscoveryPlugin {
public static final String AZURE = "azure";
protected final Settings settings;
private static final Logger logger = Loggers.getLogger(AzureDiscoveryPlugin.class);
private static final Logger logger = LogManager.getLogger(AzureDiscoveryPlugin.class);
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger);
public AzureDiscoveryPlugin(Settings settings) {

View File

@ -24,10 +24,10 @@ import com.microsoft.windowsazure.management.compute.models.DeploymentStatus;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpsConfigurator;
import com.sun.net.httpserver.HttpsServer;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.cloud.azure.classic.management.AzureComputeService;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.DiscoveryModule;
@ -243,7 +243,7 @@ public class AzureDiscoveryClusterFormationTests extends ESIntegTestCase {
responseBody.write(responseAsBytes);
responseBody.close();
} catch (XMLStreamException e) {
Loggers.getLogger(AzureDiscoveryClusterFormationTests.class).error("Failed serializing XML", e);
LogManager.getLogger(AzureDiscoveryClusterFormationTests.class).error("Failed serializing XML", e);
throw new RuntimeException(e);
}
});

View File

@ -24,9 +24,9 @@ import com.amazonaws.Protocol;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.BasicSessionCredentials;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
@ -75,7 +75,7 @@ final class Ec2ClientSettings {
static final Setting<TimeValue> READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout",
TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope);
private static final Logger logger = Loggers.getLogger(Ec2ClientSettings.class);
private static final Logger logger = LogManager.getLogger(Ec2ClientSettings.class);
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger);

View File

@ -20,10 +20,10 @@
package org.elasticsearch.discovery.ec2;
import com.amazonaws.util.json.Jackson;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@ -52,7 +52,7 @@ import java.util.function.Supplier;
public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, ReloadablePlugin {
private static Logger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class);
private static Logger logger = LogManager.getLogger(Ec2DiscoveryPlugin.class);
public static final String EC2 = "ec2";
static {

View File

@ -19,9 +19,9 @@
package org.elasticsearch.cloud.gce;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
public class GceModule extends AbstractModule {
@ -29,7 +29,7 @@ public class GceModule extends AbstractModule {
static Class<? extends GceInstancesService> computeServiceImpl = GceInstancesServiceImpl.class;
protected final Settings settings;
protected final Logger logger = Loggers.getLogger(GceModule.class);
protected final Logger logger = LogManager.getLogger(GceModule.class);
public GceModule(Settings settings) {
this.settings = settings;

View File

@ -21,19 +21,19 @@ package org.elasticsearch.plugin.discovery.gce;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.util.ClassInfo;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.core.internal.io.IOUtils;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.cloud.gce.GceInstancesService;
import org.elasticsearch.cloud.gce.GceInstancesServiceImpl;
import org.elasticsearch.cloud.gce.GceMetadataService;
import org.elasticsearch.cloud.gce.network.GceNameResolver;
import org.elasticsearch.cloud.gce.util.Access;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.discovery.gce.GceUnicastHostsProvider;
import org.elasticsearch.discovery.zen.UnicastHostsProvider;
import org.elasticsearch.plugins.DiscoveryPlugin;
@ -57,7 +57,7 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
public static final String GCE = "gce";
protected final Settings settings;
private static final Logger logger = Loggers.getLogger(GceDiscoveryPlugin.class);
private static final Logger logger = LogManager.getLogger(GceDiscoveryPlugin.class);
// stashed when created in order to properly close
private final SetOnce<GceInstancesService> gceInstancesService = new SetOnce<>();

View File

@ -26,18 +26,18 @@ import com.google.api.client.json.Json;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpRequest;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
public class GceMockUtils {
protected static final Logger logger = Loggers.getLogger(GceMockUtils.class);
protected static final Logger logger = LogManager.getLogger(GceMockUtils.class);
public static final String GCE_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/";

View File

@ -19,11 +19,11 @@
package org.elasticsearch.ingest.geoip;
import com.maxmind.geoip2.DatabaseReader;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.core.internal.io.IOUtils;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.core.internal.io.IOUtils;
import java.io.Closeable;
import java.io.IOException;
@ -34,7 +34,7 @@ import java.io.IOException;
*/
final class DatabaseReaderLazyLoader implements Closeable {
private static final Logger LOGGER = Loggers.getLogger(DatabaseReaderLazyLoader.class);
private static final Logger LOGGER = LogManager.getLogger(DatabaseReaderLazyLoader.class);
private final String databaseFileName;
private final CheckedSupplier<DatabaseReader, IOException> loader;

View File

@ -21,12 +21,12 @@ package org.elasticsearch.repositories.azure;
import com.microsoft.azure.storage.LocationMode;
import com.microsoft.azure.storage.StorageException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
import java.io.InputStream;
@ -37,7 +37,7 @@ import java.util.Map;
public class AzureBlobContainer extends AbstractBlobContainer {
private final Logger logger = Loggers.getLogger(AzureBlobContainer.class);
private final Logger logger = LogManager.getLogger(AzureBlobContainer.class);
private final AzureBlobStore blobStore;
private final String keyPath;

View File

@ -18,15 +18,6 @@
*/
package org.elasticsearch.repositories.hdfs;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Locale;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileContext;
@ -36,13 +27,13 @@ import org.apache.hadoop.io.retry.FailoverProxyProvider;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -50,9 +41,18 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Locale;
public final class HdfsRepository extends BlobStoreRepository {
private static final Logger LOGGER = Loggers.getLogger(HdfsRepository.class);
private static final Logger LOGGER = LogManager.getLogger(HdfsRepository.class);
private static final String CONF_SECURITY_PRINCIPAL = "security.principal";

View File

@ -20,6 +20,7 @@
package org.elasticsearch.transport.nio;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.NioIntegTestCase;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest;
import org.elasticsearch.common.logging.Loggers;
@ -37,12 +38,12 @@ public class NioTransportLoggingIT extends NioIntegTestCase {
public void setUp() throws Exception {
super.setUp();
appender = new MockLogAppender();
Loggers.addAppender(Loggers.getLogger(TransportLogger.class), appender);
Loggers.addAppender(LogManager.getLogger(TransportLogger.class), appender);
appender.start();
}
public void tearDown() throws Exception {
Loggers.removeAppender(Loggers.getLogger(TransportLogger.class), appender);
Loggers.removeAppender(LogManager.getLogger(TransportLogger.class), appender);
appender.stop();
super.tearDown();
}

View File

@ -29,7 +29,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -131,8 +130,8 @@ import java.io.UncheckedIOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.ZoneId;
import java.security.Security;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

View File

@ -19,8 +19,8 @@
package org.elasticsearch.test.junit.listeners;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.junit.runner.Description;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.test.test;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.junit.listeners.LoggingListener;

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.license;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.License.OperationMode;
import org.elasticsearch.xpack.core.XPackField;
@ -309,7 +309,7 @@ public class XPackLicenseState {
// Before 6.3, Trial licenses would default having security enabled.
// If this license was generated before that version, then treat it as if security is explicitly enabled
if (mostRecentTrialVersion == null || mostRecentTrialVersion.before(Version.V_6_3_0)) {
Loggers.getLogger(getClass()).info("Automatically enabling security for older trial license ({})",
LogManager.getLogger(getClass()).info("Automatically enabling security for older trial license ({})",
mostRecentTrialVersion == null ? "[pre 6.1.0]" : mostRecentTrialVersion.toString());
isSecurityEnabledByTrialVersion = true;
}

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.core.ml.datafeed.extractor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
@ -34,7 +34,7 @@ import java.util.concurrent.TimeUnit;
*/
public final class ExtractorUtils {
private static final Logger LOGGER = Loggers.getLogger(ExtractorUtils.class);
private static final Logger LOGGER = LogManager.getLogger(ExtractorUtils.class);
private static final String EPOCH_MILLIS = "epoch_millis";
private ExtractorUtils() {}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.core.security.authz.permission;
import org.apache.logging.log4j.LogManager;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.ElasticsearchSecurityException;
@ -14,7 +15,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl;
import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege;
import org.elasticsearch.xpack.core.security.support.Automatons;
@ -67,7 +67,7 @@ public final class IndicesPermission implements Iterable<IndicesPermission.Group
try {
return Automatons.predicate(indices);
} catch (TooComplexToDeterminizeException e) {
Loggers.getLogger(IndicesPermission.class).debug("Index pattern automaton [{}] is too complex", indices);
LogManager.getLogger(IndicesPermission.class).debug("Index pattern automaton [{}] is too complex", indices);
String description = Strings.collectionToCommaDelimitedString(indices);
if (description.length() > 80) {
description = Strings.cleanTruncate(description, 80) + "...";

View File

@ -5,10 +5,10 @@
*/
package org.elasticsearch.xpack.logstash;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
@ -59,7 +59,7 @@ public class Logstash extends Plugin implements ActionPlugin {
public UnaryOperator<Map<String, IndexTemplateMetaData>> getIndexTemplateMetaDataUpgrader() {
return templates -> {
TemplateUtils.loadTemplateIntoMap("/" + LOGSTASH_TEMPLATE_NAME + ".json", templates, LOGSTASH_TEMPLATE_NAME,
Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN, Loggers.getLogger(Logstash.class));
Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN, LogManager.getLogger(Logstash.class));
return templates;
};
}

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.ml.integration;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -204,7 +204,7 @@ public class CategorizationIT extends MlNativeAutodetectIntegTestCase {
flushJob(jobId, false);
long duration = System.currentTimeMillis() - startTime;
Loggers.getLogger(CategorizationIT.class).info("Performance test with tokenization in " +
LogManager.getLogger(CategorizationIT.class).info("Performance test with tokenization in " +
(MachineLearning.CATEGORIZATION_TOKENIZATION_IN_JAVA ? "Java" : "C++") + " took " + duration + "ms");
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
@ -22,7 +23,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
@ -264,7 +264,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
public static final Setting<Integer> MAX_LAZY_ML_NODES =
Setting.intSetting("xpack.ml.max_lazy_ml_nodes", 0, 0, 3, Property.Dynamic, Property.NodeScope);
private static final Logger logger = Loggers.getLogger(XPackPlugin.class);
private static final Logger logger = LogManager.getLogger(XPackPlugin.class);
private final Settings settings;
private final Environment env;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml;
import org.apache.logging.log4j.LogManager;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.Counter;
import org.elasticsearch.ElasticsearchException;
@ -16,7 +17,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.env.Environment;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.plugins.Platforms;
@ -82,7 +82,7 @@ public class MachineLearningFeatureSet implements XPackFeatureSet {
}
}
} catch (IOException | TimeoutException e) {
Loggers.getLogger(MachineLearningFeatureSet.class).error("Cannot get native code info for Machine Learning", e);
LogManager.getLogger(MachineLearningFeatureSet.class).error("Cannot get native code info for Machine Learning", e);
throw new ElasticsearchException("Cannot communicate with Machine Learning native code");
}
}

View File

@ -5,12 +5,12 @@
*/
package org.elasticsearch.xpack.ml;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils;
@ -32,7 +32,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
*/
public class MlDailyMaintenanceService implements Releasable {
private static final Logger LOGGER = Loggers.getLogger(MlDailyMaintenanceService.class);
private static final Logger LOGGER = LogManager.getLogger(MlDailyMaintenanceService.class);
private static final int MAX_TIME_OFFSET_MINUTES = 120;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.action;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
@ -34,7 +35,6 @@ import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -789,7 +789,7 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
public static class JobTask extends AllocatedPersistentTask implements OpenJobAction.JobTaskMatcher {
private static final Logger LOGGER = Loggers.getLogger(JobTask.class);
private static final Logger LOGGER = LogManager.getLogger(JobTask.class);
private final String jobId;
private volatile AutodetectProcessManager autodetectProcessManager;

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.ml.datafeed;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentType;
@ -20,10 +20,10 @@ import org.elasticsearch.xpack.core.ml.action.FlushJobAction;
import org.elasticsearch.xpack.core.ml.action.PersistJobAction;
import org.elasticsearch.xpack.core.ml.action.PostDataAction;
import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.notifications.Auditor;
import java.io.ByteArrayOutputStream;
@ -39,7 +39,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
class DatafeedJob {
private static final Logger LOGGER = Loggers.getLogger(DatafeedJob.class);
private static final Logger LOGGER = LogManager.getLogger(DatafeedJob.class);
private static final int NEXT_TASK_DELAY_MS = 100;
private final Auditor auditor;

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.ml.datafeed;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.license.RemoteClusterLicenseChecker;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.xpack.core.ml.MlMetadata;
@ -26,7 +26,7 @@ import java.util.Objects;
public class DatafeedNodeSelector {
private static final Logger LOGGER = Loggers.getLogger(DatafeedNodeSelector.class);
private static final Logger LOGGER = LogManager.getLogger(DatafeedNodeSelector.class);
private final DatafeedConfig datafeed;
private final PersistentTasksCustomMetaData.PersistentTask<?> jobTask;

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.xpack.core.ClientHelper;
@ -36,7 +36,7 @@ import java.util.stream.Collectors;
*/
class AggregationDataExtractor implements DataExtractor {
private static final Logger LOGGER = Loggers.getLogger(AggregationDataExtractor.class);
private static final Logger LOGGER = LogManager.getLogger(AggregationDataExtractor.class);
/**
* The number of key-value pairs written in each batch to process.

View File

@ -5,17 +5,17 @@
*/
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
import org.elasticsearch.search.aggregations.metrics.Max;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
import org.elasticsearch.search.aggregations.metrics.Percentile;
import org.elasticsearch.search.aggregations.metrics.Percentiles;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -42,7 +42,7 @@ import java.util.TreeMap;
*/
class AggregationToJsonProcessor {
private static final Logger LOGGER = Loggers.getLogger(AggregationToJsonProcessor.class);
private static final Logger LOGGER = LogManager.getLogger(AggregationToJsonProcessor.class);
private final String timeField;
private final Set<String> fields;

View File

@ -5,12 +5,12 @@
*/
package org.elasticsearch.xpack.ml.datafeed.extractor.chunked;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.metrics.Max;
@ -43,7 +43,7 @@ import java.util.Optional;
*/
public class ChunkedDataExtractor implements DataExtractor {
private static final Logger LOGGER = Loggers.getLogger(ChunkedDataExtractor.class);
private static final Logger LOGGER = LogManager.getLogger(ChunkedDataExtractor.class);
private static final String EARLIEST_TIME = "earliest_time";
private static final String LATEST_TIME = "latest_time";

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.ClearScrollAction;
import org.elasticsearch.action.search.ClearScrollRequest;
@ -15,7 +16,6 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollAction;
import org.elasticsearch.action.search.SearchScrollRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.StoredFieldsContext;
@ -42,7 +42,7 @@ import java.util.concurrent.TimeUnit;
*/
class ScrollDataExtractor implements DataExtractor {
private static final Logger LOGGER = Loggers.getLogger(ScrollDataExtractor.class);
private static final Logger LOGGER = LogManager.getLogger(ScrollDataExtractor.class);
private static final TimeValue SCROLL_TIMEOUT = new TimeValue(30, TimeUnit.MINUTES);
private static final String EPOCH_MILLIS_FORMAT = "epoch_millis";

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
@ -13,7 +14,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.component.LifecycleListener;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.core.ml.action.UpdateProcessAction.Respons
*/
public class UpdateJobProcessNotifier extends AbstractComponent {
private static final Logger LOGGER = Loggers.getLogger(UpdateJobProcessNotifier.class);
private static final Logger LOGGER = LogManager.getLogger(UpdateJobProcessNotifier.class);
private final Client client;
private final ClusterService clusterService;

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.categorization;
import org.elasticsearch.common.logging.Loggers;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.grok.Grok;
import java.util.ArrayList;
@ -112,7 +112,7 @@ public final class GrokPatternCreator {
// We should never get here. If we do it implies a bug in the original categorization,
// as it's produced a regex that doesn't match the examples.
assert matcher.matches() : exampleProcessor.pattern() + " did not match " + example;
Loggers.getLogger(GrokPatternCreator.class).error("[{}] Pattern [{}] did not match example [{}]", jobId,
LogManager.getLogger(GrokPatternCreator.class).error("[{}] Pattern [{}] did not match example [{}]", jobId,
exampleProcessor.pattern(), example);
}
}

View File

@ -5,12 +5,12 @@
*/
package org.elasticsearch.xpack.ml.job.persistence;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@ -29,7 +29,7 @@ import java.util.Objects;
* and iterate through them in batches.
*/
public abstract class BatchedDocumentsIterator<T> {
private static final Logger LOGGER = Loggers.getLogger(BatchedDocumentsIterator.class);
private static final Logger LOGGER = LogManager.getLogger(BatchedDocumentsIterator.class);
private static final String CONTEXT_ALIVE_DURATION = "5m";
private static final int BATCH_SIZE = 10000;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.persistence;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkAction;
@ -14,7 +15,6 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
@ -36,7 +36,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
public class JobDataDeleter {
private static final Logger LOGGER = Loggers.getLogger(JobDataDeleter.class);
private static final Logger LOGGER = LogManager.getLogger(JobDataDeleter.class);
private final Client client;
private final String jobId;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.persistence;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
@ -41,7 +42,6 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
@ -129,7 +129,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
public class JobResultsProvider {
private static final Logger LOGGER = Loggers.getLogger(JobResultsProvider.class);
private static final Logger LOGGER = LogManager.getLogger(JobResultsProvider.class);
private static final int RECORDS_SIZE_PARAM = 10000;
public static final int BUCKETS_FOR_ESTABLISHED_MEMORY_SIZE = 20;

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.ml.job.persistence;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
@ -33,7 +33,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
*/
public class StateStreamer {
private static final Logger LOGGER = Loggers.getLogger(StateStreamer.class);
private static final Logger LOGGER = LogManager.getLogger(StateStreamer.class);
private final Client client;
private volatile boolean isCancelled;

View File

@ -5,12 +5,12 @@
*/
package org.elasticsearch.xpack.ml.job.process.autodetect;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -58,7 +58,7 @@ import java.util.function.Consumer;
public class AutodetectCommunicator implements Closeable {
private static final Logger LOGGER = Loggers.getLogger(AutodetectCommunicator.class);
private static final Logger LOGGER = LogManager.getLogger(AutodetectCommunicator.class);
private static final Duration FLUSH_PROCESS_CHECK_FREQUENCY = Duration.ofSeconds(1);
private final Job job;

View File

@ -5,10 +5,10 @@
*/
package org.elasticsearch.xpack.ml.job.process.autodetect;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.core.internal.io.IOUtils;
@ -33,7 +33,7 @@ import java.util.concurrent.ExecutorService;
public class NativeAutodetectProcessFactory implements AutodetectProcessFactory {
private static final Logger LOGGER = Loggers.getLogger(NativeAutodetectProcessFactory.class);
private static final Logger LOGGER = LogManager.getLogger(NativeAutodetectProcessFactory.class);
private static final NamedPipeHelper NAMED_PIPE_HELPER = new NamedPipeHelper();
public static final Duration PROCESS_STARTUP_TIMEOUT = Duration.ofSeconds(10);

View File

@ -5,9 +5,9 @@
*/
package org.elasticsearch.xpack.ml.job.process.autodetect;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
@ -21,7 +21,7 @@ import java.util.concurrent.locks.ReentrantLock;
*/
final class ProcessContext {
private static final Logger LOGGER = Loggers.getLogger(ProcessContext.class);
private static final Logger LOGGER = LogManager.getLogger(ProcessContext.class);
private final ReentrantLock lock = new ReentrantLock();
private final JobTask jobTask;

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.ml.job.process.autodetect.output;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@ -74,7 +74,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
*/
public class AutoDetectResultProcessor {
private static final Logger LOGGER = Loggers.getLogger(AutoDetectResultProcessor.class);
private static final Logger LOGGER = LogManager.getLogger(AutoDetectResultProcessor.class);
/**
* This is how far behind real-time we'll update the job with the latest established model memory.

View File

@ -5,9 +5,9 @@
*/
package org.elasticsearch.xpack.ml.job.process.autodetect.writer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
@ -41,7 +41,7 @@ import java.util.function.BiConsumer;
*/
class CsvDataToProcessWriter extends AbstractDataToProcessWriter {
private static final Logger LOGGER = Loggers.getLogger(CsvDataToProcessWriter.class);
private static final Logger LOGGER = LogManager.getLogger(CsvDataToProcessWriter.class);
/**
* Maximum number of lines allowed within a single CSV record.

View File

@ -5,9 +5,9 @@
*/
package org.elasticsearch.xpack.ml.job.process.autodetect.writer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -37,7 +37,7 @@ import java.util.function.BiConsumer;
*/
class JsonDataToProcessWriter extends AbstractDataToProcessWriter {
private static final Logger LOGGER = Loggers.getLogger(JsonDataToProcessWriter.class);
private static final Logger LOGGER = LogManager.getLogger(JsonDataToProcessWriter.class);
private NamedXContentRegistry xContentRegistry;
JsonDataToProcessWriter(boolean includeControlField, boolean includeTokensField, AutodetectProcess autodetectProcess,

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.ml.job.process.diagnostics;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
@ -23,7 +23,7 @@ public class DataStreamDiagnostics {
*/
private static final int DATA_SPARSITY_THRESHOLD = 2;
private static final Logger LOGGER = Loggers.getLogger(DataStreamDiagnostics.class);
private static final Logger LOGGER = LogManager.getLogger(DataStreamDiagnostics.class);
private final BucketDiagnostics bucketDiagnostics;

View File

@ -27,7 +27,6 @@ import java.time.ZonedDateTime;
* - It can be used to produce results in testing that do not vary based on changes to the real normalization algorithms
*/
public class MultiplyingNormalizerProcess implements NormalizerProcess {
private static final Logger LOGGER = LogManager.getLogger(MultiplyingNormalizerProcess.class);
private final double factor;

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.ml.job.process.normalizer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.ml.job.process.normalizer.output.NormalizerResultHandler;
import java.io.IOException;
@ -29,7 +29,7 @@ import java.util.concurrent.Future;
* and in exactly the same order as the inputs.
*/
public class Normalizer {
private static final Logger LOGGER = Loggers.getLogger(Normalizer.class);
private static final Logger LOGGER = LogManager.getLogger(Normalizer.class);
private final String jobId;
private final NormalizerProcessFactory processFactory;

View File

@ -5,16 +5,16 @@
*/
package org.elasticsearch.xpack.ml.job.process.normalizer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.job.results.Influencer;
import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.xpack.ml.job.persistence.BatchedDocumentsIterator;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider;
import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider;
import java.util.ArrayList;
import java.util.Deque;
@ -27,7 +27,7 @@ import java.util.stream.Collectors;
* with the renormalized scores
*/
public class ScoresUpdater {
private static final Logger LOGGER = Loggers.getLogger(ScoresUpdater.class);
private static final Logger LOGGER = LogManager.getLogger(ScoresUpdater.class);
/**
* Target number of buckets to renormalize at a time

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.ml.job.process.normalizer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
import java.util.Deque;
@ -21,7 +21,7 @@ import java.util.concurrent.Semaphore;
*/
public class ShortCircuitingRenormalizer implements Renormalizer {
private static final Logger LOGGER = Loggers.getLogger(ShortCircuitingRenormalizer.class);
private static final Logger LOGGER = LogManager.getLogger(ShortCircuitingRenormalizer.class);
private final String jobId;
private final ScoresUpdater scoresUpdater;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.retention;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
@ -13,7 +14,6 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ThreadedActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -55,7 +55,7 @@ import java.util.Objects;
*/
public class ExpiredForecastsRemover implements MlDataRemover {
private static final Logger LOGGER = Loggers.getLogger(ExpiredForecastsRemover.class);
private static final Logger LOGGER = LogManager.getLogger(ExpiredForecastsRemover.class);
private static final int MAX_FORECASTS = 10000;
private static final String RESULTS_INDEX_PATTERN = AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*";

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.retention;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
@ -15,7 +16,6 @@ import org.elasticsearch.action.support.ThreadedActionListener;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
@ -45,7 +45,7 @@ import java.util.Objects;
*/
public class ExpiredModelSnapshotsRemover extends AbstractExpiredJobDataRemover {
private static final Logger LOGGER = Loggers.getLogger(ExpiredModelSnapshotsRemover.class);
private static final Logger LOGGER = LogManager.getLogger(ExpiredModelSnapshotsRemover.class);
/**
* The max number of snapshots to fetch per job. It is set to 10K, the default for an index as

View File

@ -5,12 +5,12 @@
*/
package org.elasticsearch.xpack.ml.job.retention;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
@ -43,7 +43,7 @@ import java.util.Objects;
*/
public class ExpiredResultsRemover extends AbstractExpiredJobDataRemover {
private static final Logger LOGGER = Loggers.getLogger(ExpiredResultsRemover.class);
private static final Logger LOGGER = LogManager.getLogger(ExpiredResultsRemover.class);
private final Client client;
private final Auditor auditor;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.retention;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
@ -12,7 +13,6 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
@ -35,7 +35,7 @@ import java.util.function.Function;
*/
public class UnusedStateRemover implements MlDataRemover {
private static final Logger LOGGER = Loggers.getLogger(UnusedStateRemover.class);
private static final Logger LOGGER = LogManager.getLogger(UnusedStateRemover.class);
private final Client client;
private final ClusterService clusterService;

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.ml.notifications;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
public class Auditor {
private static final Logger LOGGER = Loggers.getLogger(Auditor.class);
private static final Logger LOGGER = LogManager.getLogger(Auditor.class);
private final Client client;
private final String nodeName;

View File

@ -5,9 +5,9 @@
*/
package org.elasticsearch.xpack.ml.process;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.ml.process.logging.CppLogMessageHandler;
import org.elasticsearch.xpack.ml.utils.NamedPipeHelper;
@ -27,8 +27,9 @@ import java.util.concurrent.TimeoutException;
/**
* Maintains the connection to the native controller daemon that can start other processes.
*/
public class NativeController {
private static final Logger LOGGER = Loggers.getLogger(NativeController.class);
private static final Logger LOGGER = LogManager.getLogger(NativeController.class);
/**
* Process controller native program name

View File

@ -6,8 +6,8 @@
package org.elasticsearch.xpack.ml.process;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.env.Environment;
@ -21,7 +21,7 @@ import java.nio.file.Path;
*/
public class NativeStorageProvider {
private static final Logger LOGGER = Loggers.getLogger(NativeStorageProvider.class);
private static final Logger LOGGER = LogManager.getLogger(NativeStorageProvider.class);
private static final String LOCAL_STORAGE_SUBFOLDER = "ml-local-data";

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.ml.process.logging;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
@ -13,7 +14,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.CompositeBytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -47,7 +47,7 @@ import java.util.regex.Pattern;
*/
public class CppLogMessageHandler implements Closeable {
private static final Logger LOGGER = Loggers.getLogger(CppLogMessageHandler.class);
private static final Logger LOGGER = LogManager.getLogger(CppLogMessageHandler.class);
private static final int DEFAULT_READBUF_SIZE = 1024;
private static final int DEFAULT_ERROR_STORE_SIZE = 5;
private static final long MAX_MESSAGE_INTERVAL_SECONDS = 10;

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.ml.filestructurefinder;
import org.elasticsearch.common.logging.Loggers;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
@ -80,7 +80,7 @@ public abstract class FileStructureTestCase extends ESTestCase {
@After
public void printExplanation() {
Loggers.getLogger(getClass()).info("Explanation:\n" + String.join("\n", explanation));
LogManager.getLogger(getClass()).info("Explanation:\n" + String.join("\n", explanation));
}
protected Boolean randomHasByteOrderMarker(String charset) {

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.ml.process.logging;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ESTestCase;
@ -205,7 +206,7 @@ public class CppLogMessageHandlerTests extends ESTestCase {
private static void executeLoggingTest(InputStream is, MockLogAppender mockAppender, Level level, String jobId)
throws IOException {
Logger cppMessageLogger = Loggers.getLogger(CppLogMessageHandler.class);
Logger cppMessageLogger = LogManager.getLogger(CppLogMessageHandler.class);
Loggers.addAppender(cppMessageLogger, mockAppender);
Level oldLevel = cppMessageLogger.getLevel();
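Note that the Loggers import stays in this test: only logger creation moves to LogManager, while the Elasticsearch-specific Loggers.addAppender/removeAppender helpers still attach the mock appender. A minimal sketch of that wiring, using a hypothetical withMockAppender helper and assuming the test framework's MockLogAppender:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.MockLogAppender;

class LoggingAssertionSupport {
    // Hypothetical helper: obtain the logger through LogManager, but keep using
    // Loggers.addAppender/removeAppender to hook a MockLogAppender onto it.
    static void withMockAppender(Class<?> owner, MockLogAppender appender, Runnable assertions) {
        Logger logger = LogManager.getLogger(owner);
        Loggers.addAppender(logger, appender);
        try {
            assertions.run();
        } finally {
            Loggers.removeAppender(logger, appender);
        }
    }
}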

View File

@ -11,12 +11,12 @@ import java.util.Map;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
@ -33,7 +33,7 @@ import static org.elasticsearch.xpack.monitoring.exporter.ClusterAlertsUtil.LAST
*/
public class ClusterAlertHttpResource extends PublishableHttpResource {
private static final Logger logger = Loggers.getLogger(ClusterAlertHttpResource.class);
private static final Logger logger = LogManager.getLogger(ClusterAlertHttpResource.class);
/**
* Use this to retrieve the version of Cluster Alert in the Watch's JSON response from a request.

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NByteArrayEntity;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
@ -18,7 +19,6 @@ import org.elasticsearch.client.ResponseListener;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -39,7 +39,7 @@ import java.util.Map;
*/
class HttpExportBulk extends ExportBulk {
private static final Logger logger = Loggers.getLogger(HttpExportBulk.class);
private static final Logger logger = LogManager.getLogger(HttpExportBulk.class);
/**
* The {@link RestClient} managed by the {@link HttpExporter}.

View File

@ -5,10 +5,10 @@
*/
package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseListener;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
@ -23,7 +23,7 @@ import java.util.Objects;
*/
class HttpExportBulkResponseListener implements ResponseListener {
private static final Logger logger = Loggers.getLogger(HttpExportBulkResponseListener.class);
private static final Logger logger = LogManager.getLogger(HttpExportBulkResponseListener.class);
/**
* Singleton instance.

View File

@ -13,6 +13,7 @@ import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.message.BasicHeader;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.client.RestClient;
@ -24,7 +25,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@ -69,7 +69,7 @@ import java.util.stream.Collectors;
*/
public class HttpExporter extends Exporter {
private static final Logger logger = Loggers.getLogger(HttpExporter.class);
private static final Logger logger = LogManager.getLogger(HttpExporter.class);
public static final String TYPE = "http";

View File

@ -5,9 +5,9 @@
*/
package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.logging.Loggers;
import java.util.Collections;
import java.util.List;
@ -22,7 +22,7 @@ import java.util.List;
*/
public class MultiHttpResource extends HttpResource {
private static final Logger logger = Loggers.getLogger(MultiHttpResource.class);
private static final Logger logger = LogManager.getLogger(MultiHttpResource.class);
/**
* Sub-resources that are grouped to simplify notification.

View File

@ -6,13 +6,13 @@
package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.http.HttpHost;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.sniff.Sniffer;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
/**
* {@code NodeFailureListener} logs warnings for any node failure, but it can also notify a {@link Sniffer} and/or {@link HttpResource}
@ -23,7 +23,7 @@ import org.elasticsearch.common.logging.Loggers;
*/
class NodeFailureListener extends RestClient.FailureListener {
private static final Logger logger = Loggers.getLogger(NodeFailureListener.class);
private static final Logger logger = LogManager.getLogger(NodeFailureListener.class);
/**
* The optional {@link Sniffer} associated with the {@link RestClient}.

View File

@ -8,10 +8,10 @@ package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
@ -27,7 +27,7 @@ import java.util.function.Supplier;
*/
public class PipelineHttpResource extends PublishableHttpResource {
private static final Logger logger = Loggers.getLogger(PipelineHttpResource.class);
private static final Logger logger = LogManager.getLogger(PipelineHttpResource.class);
/**
* The name of the pipeline that is sent to the remote cluster.

View File

@ -8,10 +8,10 @@ package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
@ -28,7 +28,7 @@ import java.util.function.Supplier;
*/
public class TemplateHttpResource extends PublishableHttpResource {
private static final Logger logger = Loggers.getLogger(TemplateHttpResource.class);
private static final Logger logger = LogManager.getLogger(TemplateHttpResource.class);
/**
* The name of the template that is sent to the remote cluster.

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
@ -12,7 +13,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -25,7 +25,7 @@ import java.util.Objects;
*/
public class VersionHttpResource extends HttpResource {
private static final Logger logger = Loggers.getLogger(VersionHttpResource.class);
private static final Logger logger = LogManager.getLogger(VersionHttpResource.class);
/**
* The minimum supported version of Elasticsearch.

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.monitoring.exporter.http;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
@ -12,7 +13,6 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -31,7 +31,7 @@ import java.util.Set;
*/
public class WatcherExistsHttpResource extends PublishableHttpResource {
private static final Logger logger = Loggers.getLogger(WatcherExistsHttpResource.class);
private static final Logger logger = LogManager.getLogger(WatcherExistsHttpResource.class);
/**
* Use this to avoid getting any JSON response from a request.
*/

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.monitoring.exporter.local;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
@ -26,7 +27,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@ -80,7 +80,7 @@ import static org.elasticsearch.xpack.monitoring.Monitoring.CLEAN_WATCHER_HISTOR
public class LocalExporter extends Exporter implements ClusterStateListener, CleanerService.Listener {
private static final Logger logger = Loggers.getLogger(LocalExporter.class);
private static final Logger logger = LogManager.getLogger(LocalExporter.class);
public static final String TYPE = "local";

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.rollup;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchHits;
@ -55,7 +55,7 @@ import java.util.stream.Collectors;
*/
public class RollupResponseTranslator {
private static final Logger logger = Loggers.getLogger(RollupResponseTranslator.class);
private static final Logger logger = LogManager.getLogger(RollupResponseTranslator.class);
/**
* Verifies a live-only search response. Essentially just checks for failure then returns

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.rollup.action;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;
@ -27,7 +28,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.query.BoolQueryBuilder;
@ -81,7 +81,7 @@ public class TransportRollupSearchAction extends TransportAction<SearchRequest,
private final BigArrays bigArrays;
private final ScriptService scriptService;
private final ClusterService clusterService;
private static final Logger logger = Loggers.getLogger(RollupSearchAction.class);
private static final Logger logger = LogManager.getLogger(RollupSearchAction.class);
@Inject
public TransportRollupSearchAction(Settings settings, TransportService transportService,

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.security;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.Version;
@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.regex.Regex;
@ -105,9 +105,9 @@ import org.elasticsearch.xpack.core.security.action.token.RefreshTokenAction;
import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction;
import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction;
import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction;
import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction;
import org.elasticsearch.xpack.core.security.action.user.GetUsersAction;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction;
import org.elasticsearch.xpack.core.security.action.user.PutUserAction;
import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction;
import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler;
@ -122,6 +122,7 @@ import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSe
import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult;
import org.elasticsearch.xpack.core.security.index.IndexAuditTrailField;
import org.elasticsearch.xpack.core.security.support.Automatons;
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
@ -161,9 +162,9 @@ import org.elasticsearch.xpack.security.action.token.TransportRefreshTokenAction
import org.elasticsearch.xpack.security.action.user.TransportAuthenticateAction;
import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction;
import org.elasticsearch.xpack.security.action.user.TransportDeleteUserAction;
import org.elasticsearch.xpack.security.action.user.TransportGetUserPrivilegesAction;
import org.elasticsearch.xpack.security.action.user.TransportGetUsersAction;
import org.elasticsearch.xpack.security.action.user.TransportHasPrivilegesAction;
import org.elasticsearch.xpack.security.action.user.TransportGetUserPrivilegesAction;
import org.elasticsearch.xpack.security.action.user.TransportPutUserAction;
import org.elasticsearch.xpack.security.action.user.TransportSetEnabledAction;
import org.elasticsearch.xpack.security.audit.AuditTrail;
@ -185,7 +186,6 @@ import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore;
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult;
import org.elasticsearch.xpack.security.ingest.SetSecurityUserProcessor;
import org.elasticsearch.xpack.security.rest.SecurityRestFilter;
import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction;
@ -256,7 +256,7 @@ import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECU
public class Security extends Plugin implements ActionPlugin, IngestPlugin, NetworkPlugin, ClusterPlugin,
DiscoveryPlugin, MapperPlugin, ExtensiblePlugin {
private static final Logger logger = Loggers.getLogger(Security.class);
private static final Logger logger = LogManager.getLogger(Security.class);
static final Setting<List<String>> AUDIT_OUTPUTS_SETTING =
Setting.listSetting(SecurityField.setting("audit.outputs"),

View File

@ -11,9 +11,9 @@ import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.Loggers;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.opensaml.saml.saml2.core.Issuer;
@ -32,7 +32,7 @@ public abstract class SamlMessageBuilder {
protected final EntityDescriptor identityProvider;
public SamlMessageBuilder(EntityDescriptor identityProvider, SpConfiguration serviceProvider, Clock clock) {
this.logger = Loggers.getLogger(getClass());
this.logger = LogManager.getLogger(getClass());
this.identityProvider = identityProvider;
this.serviceProvider = serviceProvider;
this.clock = clock;
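SamlMessageBuilder and several classes further down use the instance form rather than a class literal. A rough sketch, with a hypothetical ExampleBuilder base class, of why getClass() is kept as the argument while only the factory changes:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public abstract class ExampleBuilder {
    protected final Logger logger;

    protected ExampleBuilder() {
        // Previously: this.logger = Loggers.getLogger(getClass());
        // getClass() names the logger after the concrete subclass, so log lines from each
        // subclass stay attributable; the swap only changes where the logger comes from.
        this.logger = LogManager.getLogger(getClass());
    }
}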

View File

@ -30,6 +30,7 @@ import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.cli.EnvironmentAwareCommand;

View File

@ -6,10 +6,10 @@
package org.elasticsearch.xpack.security.authc.support;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.license.LicenseUtils;
@ -60,7 +60,7 @@ public class DelegatedAuthorizationSupport {
final List<Realm> resolvedLookupRealms = resolveRealms(allRealms, lookupRealms);
checkForRealmChains(resolvedLookupRealms, settings);
this.lookup = new RealmUserLookup(resolvedLookupRealms, threadContext);
this.logger = Loggers.getLogger(getClass());
this.logger = LogManager.getLogger(getClass());
this.licenseState = licenseState;
}

View File

@ -8,12 +8,12 @@ package org.elasticsearch.xpack.security.authc.support;
import com.unboundid.ldap.sdk.DN;
import com.unboundid.ldap.sdk.LDAPException;
import com.unboundid.util.LDAPSDKUsageException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel;
import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression;
@ -159,7 +159,7 @@ public interface UserRoleMapper {
*
*/
class DistinguishedNamePredicate implements Predicate<FieldExpression.FieldValue> {
private static final Logger LOGGER = Loggers.getLogger(DistinguishedNamePredicate.class);
private static final Logger LOGGER = LogManager.getLogger(DistinguishedNamePredicate.class);
private final String string;
private final DN dn;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.security.transport;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
@ -13,7 +14,6 @@ import org.elasticsearch.action.admin.indices.close.CloseIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
import org.elasticsearch.action.admin.indices.open.OpenIndexAction;
import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.transport.TaskTransportChannel;
import org.elasticsearch.transport.TcpChannel;
@ -57,7 +57,7 @@ public interface ServerTransportFilter {
* request is properly authenticated and authorized
*/
class NodeProfile implements ServerTransportFilter {
private static final Logger logger = Loggers.getLogger(NodeProfile.class);
private static final Logger logger = LogManager.getLogger(NodeProfile.class);
private final AuthenticationService authcService;
private final AuthorizationService authzService;

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.security.authc.saml;
import org.apache.logging.log4j.LogManager;
import org.apache.xml.security.Init;
import org.apache.xml.security.encryption.EncryptedData;
import org.apache.xml.security.encryption.EncryptedKey;
@ -16,7 +17,6 @@ import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.watcher.watch.ClockMock;
import org.hamcrest.Matchers;
@ -57,7 +57,6 @@ import javax.xml.crypto.dsig.spec.TransformParameterSpec;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
@ -125,7 +124,7 @@ public class SamlAuthenticatorTests extends SamlTestCase {
public static void init() throws Exception {
assumeFalse("Can't run in a FIPS JVM, there is no DOM XMLSignature Factory so we can't sign XML documents", inFipsJvm());
// TODO: Refactor the signing to use org.opensaml.xmlsec.signature.support.Signer so that we can run the tests
SamlUtils.initialize(Loggers.getLogger(SamlAuthenticatorTests.class));
SamlUtils.initialize(LogManager.getLogger(SamlAuthenticatorTests.class));
// Initialise Apache XML security so that the signDoc methods work correctly.
Init.init();
}
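Call sites like the SamlUtils.initialize(...) line above hand the logger to another component; the collaborator itself is unchanged, only the factory at the call site moves to LogManager. A sketch with a hypothetical ExampleInitializer (not part of this commit):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

final class ExampleInitializer {
    private ExampleInitializer() {}

    // Collaborators that accept a Logger need no change...
    static void initialize(Logger logger) {
        logger.debug("initializing XML security providers");
    }

    // ...only the caller switches from Loggers.getLogger(...) to LogManager.getLogger(...).
    static void initializeForTests() {
        initialize(LogManager.getLogger(ExampleInitializer.class));
    }
}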

View File

@ -5,12 +5,12 @@
*/
package org.elasticsearch.xpack.security.authc.saml;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.core.ssl.PemUtils;
@ -37,7 +37,7 @@ public abstract class SamlTestCase extends ESTestCase {
@BeforeClass
public static void setupSaml() throws Exception {
Logger logger = Loggers.getLogger(SamlTestCase.class);
Logger logger = LogManager.getLogger(SamlTestCase.class);
if (isTurkishLocale()) {
// See: https://github.com/elastic/x-pack-elasticsearch/issues/2815
logger.warn("Attempting to run SAML test on turkish-like locale, but that breaks OpenSAML. Switching to English.");

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.execution.search;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
@ -16,7 +17,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation;
@ -42,7 +42,7 @@ import java.util.Objects;
*/
public class CompositeAggregationCursor implements Cursor {
private final Logger log = Loggers.getLogger(getClass());
private final Logger log = LogManager.getLogger(getClass());
public static final String NAME = "c";

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.execution.search;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
@ -13,7 +14,6 @@ import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -61,7 +61,7 @@ import static java.util.Collections.singletonList;
// TODO: add retry/back-off
public class Querier {
private final Logger log = Loggers.getLogger(getClass());
private final Logger log = LogManager.getLogger(getClass());
private final TimeValue keepAlive, timeout;
private final int size;

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.execution.search;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.ClearScrollRequest;
@ -16,7 +17,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
import org.elasticsearch.xpack.sql.session.Configuration;
import org.elasticsearch.xpack.sql.session.Cursor;
@ -28,7 +28,7 @@ import java.util.Objects;
public class ScrollCursor implements Cursor {
private final Logger log = Loggers.getLogger(getClass());
private final Logger log = LogManager.getLogger(getClass());
public static final String NAME = "s";

View File

@ -7,8 +7,8 @@ package org.elasticsearch.xpack.sql.rule;
import java.util.function.UnaryOperator;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.sql.tree.Node;
import org.elasticsearch.xpack.sql.util.ReflectionUtils;
@ -22,7 +22,7 @@ import org.elasticsearch.xpack.sql.util.ReflectionUtils;
*/
public abstract class Rule<E extends T, T extends Node<T>> implements UnaryOperator<T> {
protected Logger log = Loggers.getLogger(getClass());
protected Logger log = LogManager.getLogger(getClass());
private final String name;
private final Class<E> typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.sql.rule;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.sql.tree.Node;
import org.elasticsearch.xpack.sql.tree.NodeUtils;
@ -18,7 +18,7 @@ import java.util.Map;
public abstract class RuleExecutor<TreeType extends Node<TreeType>> {
private final Logger log = Loggers.getLogger(getClass());
private final Logger log = LogManager.getLogger(getClass());
public static class Limiter {
public static final Limiter DEFAULT = new Limiter(100);

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.upgrade;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
@ -16,7 +17,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
@ -98,7 +98,7 @@ public class IndexUpgradeTasksIT extends ESIntegTestCase {
public MockUpgradePlugin(Settings settings) {
this.settings = settings;
this.upgrade = new Upgrade(settings);
Loggers.getLogger(IndexUpgradeTasksIT.class).info("MockUpgradePlugin is created");
LogManager.getLogger(IndexUpgradeTasksIT.class).info("MockUpgradePlugin is created");
}

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.watcher.notification.email.attachment;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@ -22,13 +22,13 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.core.watcher.watch.Payload;
import org.elasticsearch.xpack.watcher.common.http.BasicAuth;
import org.elasticsearch.xpack.watcher.common.http.HttpClient;
import org.elasticsearch.xpack.watcher.common.http.HttpMethod;
import org.elasticsearch.xpack.watcher.common.http.HttpProxy;
import org.elasticsearch.xpack.watcher.common.http.HttpRequest;
import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.watcher.common.http.HttpResponse;
import org.elasticsearch.xpack.watcher.common.http.BasicAuth;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.notification.email.Attachment;
@ -74,7 +74,7 @@ public class ReportingAttachmentParser implements EmailAttachmentParser<Reportin
this.retries = RETRIES_SETTING.get(settings);
this.httpClient = httpClient;
this.templateEngine = templateEngine;
this.logger = Loggers.getLogger(getClass());
this.logger = LogManager.getLogger(getClass());
}
@Override

View File

@ -5,9 +5,9 @@
*/
package org.elasticsearch.xpack.watcher.transform.chain;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -144,7 +144,7 @@ public class ChainTransformTests extends ESTestCase {
}
NamedExecutableTransform(Transform transform) {
super(transform, Loggers.getLogger(NamedExecutableTransform.class));
super(transform, LogManager.getLogger(NamedExecutableTransform.class));
}
@Override

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.watch;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.search.SearchRequest;
@ -12,7 +13,6 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -172,7 +172,7 @@ public class WatchTests extends ESTestCase {
templateEngine = mock(TextTemplateEngine.class);
htmlSanitizer = mock(HtmlSanitizer.class);
licenseState = mock(XPackLicenseState.class);
logger = Loggers.getLogger(WatchTests.class);
logger = LogManager.getLogger(WatchTests.class);
searchTemplateService = mock(WatcherSearchTemplateService.class);
}

View File

@ -6,10 +6,10 @@
package org.elasticsearch.xpack.security.authc.kerberos;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
@ -82,7 +82,7 @@ public abstract class KerberosTestCase extends ESTestCase {
@BeforeClass
public static void setupKerberos() throws Exception {
if (isLocaleUnsupported()) {
Logger logger = Loggers.getLogger(KerberosTestCase.class);
Logger logger = LogManager.getLogger(KerberosTestCase.class);
logger.warn("Attempting to run Kerberos test on {} locale, but that breaks SimpleKdcServer. Switching to English.",
Locale.getDefault());
restoreLocale = Locale.getDefault();
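The warning above uses log4j's {} placeholder form, which works identically on a logger obtained from LogManager. A small sketch, assuming a hypothetical LocaleCheck class that is not part of this change:

import java.util.Locale;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class LocaleCheck {
    private static final Logger logger = LogManager.getLogger(LocaleCheck.class);

    // Returns the previous default locale if it was switched, otherwise null.
    static Locale switchToEnglishIfUnsupported(Locale current) {
        if ("tr".equals(current.getLanguage())) {
            // {} placeholders are filled lazily, so the message is only built if WARN is enabled.
            logger.warn("Attempting to run test on {} locale, switching to English.", current);
            Locale.setDefault(Locale.ENGLISH);
            return current; // caller restores this later
        }
        return null;
    }
}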

View File

@ -14,10 +14,10 @@ import org.apache.kerby.kerberos.kerb.client.KrbConfig;
import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
import org.apache.kerby.util.NetworkUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
@ -37,7 +37,7 @@ import java.util.concurrent.TimeUnit;
* Starts in memory Ldap server and then uses it as backend for Kdc Server.
*/
public class SimpleKdcLdapServer {
private static final Logger logger = Loggers.getLogger(SimpleKdcLdapServer.class);
private static final Logger logger = LogManager.getLogger(SimpleKdcLdapServer.class);
private Path workDir = null;
private SimpleKdcServer simpleKdc;

View File

@ -11,6 +11,7 @@ import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.xpack.sql.cli.Cli;
import org.elasticsearch.xpack.sql.cli.CliTerminal;