Remove log4j dependency from elasticsearch-core (#28705)

* Remove log4j dependency from elasticsearch-core

  This removes the log4j dependency from our elasticsearch-core project. It was originally needed only for the jar classpath ("jar hell") checking, which now reports through a `Consumer<String>` instead, so that elasticsearch-core carries no external dependencies. The parts of #28191 that were moved along with it (such as `ESLoggerFactory` and `Loggers`) have been moved back where appropriate, since they are not required in the core jar. This is tangentially related to #28504.

* Add javadocs for the `output` parameter

* Change @code to @link

This commit is contained in:
parent b00870600b
commit d4fddfa2a0
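For orientation before the diff: the jar-hell check now takes its debug sink as an argument. A minimal sketch of the two calling styles introduced by this change (the wrapper class below is hypothetical; JarHell, ESLoggerFactory, and Logger are the real types touched in the diff):

    import java.util.function.Consumer;

    import org.apache.logging.log4j.Logger;
    import org.elasticsearch.bootstrap.JarHell;
    import org.elasticsearch.common.logging.ESLoggerFactory;

    // Hypothetical caller showing both ways of supplying the debug output sink.
    public class JarHellCheckSketch {
        public static void main(String[] args) throws Exception {
            // Command-line style: debug output goes straight to stdout.
            JarHell.checkJarHell(System.out::println);

            // Server style (as Bootstrap does after this change): the caller routes
            // output to log4j, so elasticsearch-core itself never needs log4j.
            final Logger logger = ESLoggerFactory.getLogger(JarHell.class);
            Consumer<String> sink = logger::debug;
            JarHell.checkJarHell(sink);
        }
    }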
@@ -35,8 +35,6 @@ publishing {
 }

 dependencies {
-  compile "org.apache.logging.log4j:log4j-api:${versions.log4j}"
-
   testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
   testCompile "junit:junit:${versions.junit}"
   testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"

@@ -78,4 +76,4 @@ thirdPartyAudit.excludes = [
   'org/osgi/framework/SynchronousBundleListener',
   'org/osgi/framework/wiring/BundleWire',
   'org/osgi/framework/wiring/BundleWiring'
 ]
@@ -1 +0,0 @@
-7a2999229464e7a324aa503c0a52ec0f05efe7bd
@@ -1,202 +0,0 @@
-[entire file removed: the standard Apache License, Version 2.0, January 2004 text (http://www.apache.org/licenses/) accompanying the removed log4j dependency, including the appendix and the "Copyright 1999-2005 The Apache Software Foundation" notice]
@@ -1,5 +0,0 @@
-Apache log4j
-Copyright 2007 The Apache Software Foundation
-
-This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/).
@@ -19,10 +19,8 @@

 package org.elasticsearch.bootstrap;

-import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.logging.Loggers;

 import java.io.IOException;
 import java.net.MalformedURLException;

@@ -43,6 +41,7 @@ import java.util.LinkedHashSet;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Consumer;
 import java.util.jar.JarEntry;
 import java.util.jar.JarFile;
 import java.util.jar.Manifest;

@@ -68,25 +67,23 @@ public class JarHell {
     @SuppressForbidden(reason = "command line tool")
     public static void main(String args[]) throws Exception {
         System.out.println("checking for jar hell...");
-        checkJarHell();
+        checkJarHell(System.out::println);
         System.out.println("no jar hell found");
     }

     /**
      * Checks the current classpath for duplicate classes
+     * @param output A {@link String} {@link Consumer} to which debug output will be sent
      * @throws IllegalStateException if jar hell was found
      */
-    public static void checkJarHell() throws IOException, URISyntaxException {
+    public static void checkJarHell(Consumer<String> output) throws IOException, URISyntaxException {
         ClassLoader loader = JarHell.class.getClassLoader();
-        Logger logger = Loggers.getLogger(JarHell.class);
-        if (logger.isDebugEnabled()) {
-            logger.debug("java.class.path: {}", System.getProperty("java.class.path"));
-            logger.debug("sun.boot.class.path: {}", System.getProperty("sun.boot.class.path"));
-            if (loader instanceof URLClassLoader ) {
-                logger.debug("classloader urls: {}", Arrays.toString(((URLClassLoader)loader).getURLs()));
-            }
+        output.accept("java.class.path: " + System.getProperty("java.class.path"));
+        output.accept("sun.boot.class.path: " + System.getProperty("sun.boot.class.path"));
+        if (loader instanceof URLClassLoader) {
+            output.accept("classloader urls: " + Arrays.toString(((URLClassLoader)loader).getURLs()));
         }
-        checkJarHell(parseClassPath());
+        checkJarHell(parseClassPath(), output);
     }

     /**

@@ -152,23 +149,24 @@ public class JarHell {

     /**
      * Checks the set of URLs for duplicate classes
+     * @param urls A set of URLs from the classpath to be checked for conflicting jars
+     * @param output A {@link String} {@link Consumer} to which debug output will be sent
      * @throws IllegalStateException if jar hell was found
      */
     @SuppressForbidden(reason = "needs JarFile for speed, just reading entries")
-    public static void checkJarHell(Set<URL> urls) throws URISyntaxException, IOException {
-        Logger logger = Loggers.getLogger(JarHell.class);
+    public static void checkJarHell(Set<URL> urls, Consumer<String> output) throws URISyntaxException, IOException {
         // we don't try to be sneaky and use deprecated/internal/not portable stuff
         // like sun.boot.class.path, and with jigsaw we don't yet have a way to get
         // a "list" at all. So just exclude any elements underneath the java home
         String javaHome = System.getProperty("java.home");
-        logger.debug("java.home: {}", javaHome);
+        output.accept("java.home: " + javaHome);
         final Map<String,Path> clazzes = new HashMap<>(32768);
         Set<Path> seenJars = new HashSet<>();
         for (final URL url : urls) {
             final Path path = PathUtils.get(url.toURI());
             // exclude system resources
             if (path.startsWith(javaHome)) {
-                logger.debug("excluding system resource: {}", path);
+                output.accept("excluding system resource: " + path);
                 continue;
             }
             if (path.toString().endsWith(".jar")) {

@@ -176,7 +174,7 @@ public class JarHell {
                     throw new IllegalStateException("jar hell!" + System.lineSeparator() +
                             "duplicate jar on classpath: " + path);
                 }
-                logger.debug("examining jar: {}", path);
+                output.accept("examining jar: " + path);
                 try (JarFile file = new JarFile(path.toString())) {
                     Manifest manifest = file.getManifest();
                     if (manifest != null) {

@@ -194,7 +192,7 @@ public class JarHell {
                     }
                 }
             } else {
-                logger.debug("examining directory: {}", path);
+                output.accept("examining directory: " + path);
                 // case for tests: where we have class files in the classpath
                 final Path root = PathUtils.get(url.toURI());
                 final String sep = root.getFileSystem().getSeparator();
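Because the check now reports through a plain Consumer<String>, any sink works without dragging log4j into the core jar. A small hypothetical example (not part of this commit) that collects the diagnostics in memory, e.g. for assertions in a test:

    import java.io.IOException;
    import java.net.URISyntaxException;
    import java.net.URL;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Set;

    import org.elasticsearch.bootstrap.JarHell;

    // Hypothetical helper: gather jar-hell diagnostics instead of logging them.
    public class CollectingJarHellCheck {
        public static List<String> check(Set<URL> classpath) throws IOException, URISyntaxException {
            List<String> diagnostics = new ArrayList<>();
            JarHell.checkJarHell(classpath, diagnostics::add); // Consumer<String> sink
            return diagnostics;
        }
    }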
@@ -1,69 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.logging;
-
-import org.apache.logging.log4j.Logger;
-
-public class Loggers {
-
-    public static final String SPACE = " ";
-
-    public static Logger getLogger(Logger parentLogger, String s) {
-        assert parentLogger instanceof PrefixLogger;
-        return ESLoggerFactory.getLogger(((PrefixLogger)parentLogger).prefix(), parentLogger.getName() + s);
-    }
-
-    public static Logger getLogger(String s) {
-        return ESLoggerFactory.getLogger(s);
-    }
-
-    public static Logger getLogger(Class<?> clazz) {
-        return ESLoggerFactory.getLogger(clazz);
-    }
-
-    public static Logger getLogger(Class<?> clazz, String... prefixes) {
-        return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz);
-    }
-
-    public static Logger getLogger(String name, String... prefixes) {
-        return ESLoggerFactory.getLogger(formatPrefix(prefixes), name);
-    }
-
-    private static String formatPrefix(String... prefixes) {
-        String prefix = null;
-        if (prefixes != null && prefixes.length > 0) {
-            StringBuilder sb = new StringBuilder();
-            for (String prefixX : prefixes) {
-                if (prefixX != null) {
-                    if (prefixX.equals(SPACE)) {
-                        sb.append(" ");
-                    } else {
-                        sb.append("[").append(prefixX).append("]");
-                    }
-                }
-            }
-            if (sb.length() > 0) {
-                sb.append(" ");
-                prefix = sb.toString();
-            }
-        }
-        return prefix;
-    }
-}
@@ -66,7 +66,7 @@ public class JarHellTests extends ESTestCase {
         Set<URL> jars = asSet(makeJar(dir, "foo.jar", null, "DuplicateClass.class"),
                               makeJar(dir, "bar.jar", null, "DuplicateClass.class"));
         try {
-            JarHell.checkJarHell(jars);
+            JarHell.checkJarHell(jars, logger::debug);
             fail("did not get expected exception");
         } catch (IllegalStateException e) {
             assertTrue(e.getMessage().contains("jar hell!"));

@@ -82,7 +82,7 @@ public class JarHellTests extends ESTestCase {
         Set<URL> dirs = asSet(makeFile(dir1, "DuplicateClass.class"),
                               makeFile(dir2, "DuplicateClass.class"));
         try {
-            JarHell.checkJarHell(dirs);
+            JarHell.checkJarHell(dirs, logger::debug);
             fail("did not get expected exception");
         } catch (IllegalStateException e) {
             assertTrue(e.getMessage().contains("jar hell!"));

@@ -98,7 +98,7 @@ public class JarHellTests extends ESTestCase {
         Set<URL> dirs = asSet(makeJar(dir1, "foo.jar", null, "DuplicateClass.class"),
                               makeFile(dir2, "DuplicateClass.class"));
         try {
-            JarHell.checkJarHell(dirs);
+            JarHell.checkJarHell(dirs, logger::debug);
             fail("did not get expected exception");
         } catch (IllegalStateException e) {
             assertTrue(e.getMessage().contains("jar hell!"));

@@ -113,7 +113,7 @@ public class JarHellTests extends ESTestCase {
         // this bogus jar had to be with https://github.com/jasontedor/duplicate-classes
         Set<URL> jars = Collections.singleton(JarHellTests.class.getResource("duplicate-classes.jar"));
         try {
-            JarHell.checkJarHell(jars);
+            JarHell.checkJarHell(jars, logger::debug);
             fail("did not get expected exception");
         } catch (IllegalStateException e) {
             assertTrue(e.getMessage().contains("jar hell!"));

@@ -125,7 +125,7 @@ public class JarHellTests extends ESTestCase {

     public void testXmlBeansLeniency() throws Exception {
         Set<URL> jars = Collections.singleton(JarHellTests.class.getResource("duplicate-xmlbeans-classes.jar"));
-        JarHell.checkJarHell(jars);
+        JarHell.checkJarHell(jars, logger::debug);
     }

     public void testRequiredJDKVersionTooOld() throws Exception {

@@ -144,7 +144,7 @@ public class JarHellTests extends ESTestCase {
         attributes.put(new Attributes.Name("X-Compile-Target-JDK"), targetVersion.toString());
         Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
         try {
-            JarHell.checkJarHell(jars);
+            JarHell.checkJarHell(jars, logger::debug);
             fail("did not get expected exception");
         } catch (IllegalStateException e) {
             assertTrue(e.getMessage().contains("requires Java " + targetVersion.toString()));

@@ -160,7 +160,7 @@ public class JarHellTests extends ESTestCase {
         attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "bogus");
         Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
         try {
-            JarHell.checkJarHell(jars);
+            JarHell.checkJarHell(jars, logger::debug);
             fail("did not get expected exception");
         } catch (IllegalStateException e) {
             assertTrue(e.getMessage().equals("version string must be a sequence of nonnegative decimal integers separated " +

@@ -175,7 +175,7 @@ public class JarHellTests extends ESTestCase {
         attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
         attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "1.7");
         Set<URL> jars = Collections.singleton(makeJar(dir, "foo.jar", manifest, "Foo.class"));
-        JarHell.checkJarHell(jars);
+        JarHell.checkJarHell(jars, logger::debug);
     }

     public void testValidVersions() {
@@ -22,7 +22,6 @@ package org.elasticsearch.transport.netty4;
 import org.apache.logging.log4j.Level;
 import org.elasticsearch.ESNetty4IntegTestCase;
 import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.MockLogAppender;

@@ -37,12 +36,12 @@ public class ESLoggingHandlerIT extends ESNetty4IntegTestCase {
     public void setUp() throws Exception {
         super.setUp();
         appender = new MockLogAppender();
-        ServerLoggers.addAppender(Loggers.getLogger(ESLoggingHandler.class), appender);
+        Loggers.addAppender(Loggers.getLogger(ESLoggingHandler.class), appender);
         appender.start();
     }

     public void tearDown() throws Exception {
-        ServerLoggers.removeAppender(Loggers.getLogger(ESLoggingHandler.class), appender);
+        Loggers.removeAppender(Loggers.getLogger(ESLoggingHandler.class), appender);
         appender.stop();
         super.tearDown();
     }
@@ -138,7 +138,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase {
         assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(Level.DEBUG));

         final Level level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR);
-        ServerLoggers.setLevel(ESLoggerFactory.getLogger("x"), level);
+        Loggers.setLevel(ESLoggerFactory.getLogger("x"), level);

         assertThat(ESLoggerFactory.getLogger("x").getLevel(), equalTo(level));
         assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(level));
@@ -285,12 +285,12 @@ public class EvilLoggerTests extends ESTestCase {

         final Logger hasConsoleAppender = ESLoggerFactory.getLogger("has_console_appender");

-        final Appender testLoggerConsoleAppender = ServerLoggers.findAppender(hasConsoleAppender, ConsoleAppender.class);
+        final Appender testLoggerConsoleAppender = Loggers.findAppender(hasConsoleAppender, ConsoleAppender.class);
         assertNotNull(testLoggerConsoleAppender);
         assertThat(testLoggerConsoleAppender.getName(), equalTo("console"));
         final Logger hasCountingNoOpAppender = ESLoggerFactory.getLogger("has_counting_no_op_appender");
-        assertNull(ServerLoggers.findAppender(hasCountingNoOpAppender, ConsoleAppender.class));
-        final Appender countingNoOpAppender = ServerLoggers.findAppender(hasCountingNoOpAppender, CountingNoOpAppender.class);
+        assertNull(Loggers.findAppender(hasCountingNoOpAppender, ConsoleAppender.class));
+        final Appender countingNoOpAppender = Loggers.findAppender(hasCountingNoOpAppender, CountingNoOpAppender.class);
         assertThat(countingNoOpAppender.getName(), equalTo("counting_no_op"));
     }

@@ -22,7 +22,7 @@ import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.PlainActionFuture;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.FutureUtils;

@@ -102,7 +102,7 @@ public class Retry {
             this.backoff = backoffPolicy.iterator();
             this.consumer = consumer;
             this.listener = listener;
-            this.logger = ServerLoggers.getLogger(getClass(), settings);
+            this.logger = Loggers.getLogger(getClass(), settings);
             this.scheduler = scheduler;
             // in contrast to System.currentTimeMillis(), nanoTime() uses a monotonic clock under the hood
             this.startTimestampNanos = System.nanoTime();
@@ -36,7 +36,6 @@ import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.inject.CreationException;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.logging.LogConfigurator;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.network.IfConfig;
 import org.elasticsearch.common.settings.KeyStoreWrapper;

@@ -194,7 +193,8 @@ final class Bootstrap {

         try {
             // look for jar hell
-            JarHell.checkJarHell();
+            final Logger logger = ESLoggerFactory.getLogger(JarHell.class);
+            JarHell.checkJarHell(logger::debug);
         } catch (IOException | URISyntaxException e) {
             throw new BootstrapException(e);
         }

@@ -301,9 +301,9 @@ final class Bootstrap {
         try {
             if (closeStandardStreams) {
                 final Logger rootLogger = ESLoggerFactory.getRootLogger();
-                final Appender maybeConsoleAppender = ServerLoggers.findAppender(rootLogger, ConsoleAppender.class);
+                final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class);
                 if (maybeConsoleAppender != null) {
-                    ServerLoggers.removeAppender(rootLogger, maybeConsoleAppender);
+                    Loggers.removeAppender(rootLogger, maybeConsoleAppender);
                 }
                 closeSystOut();
             }

@@ -334,9 +334,9 @@ final class Bootstrap {
         } catch (NodeValidationException | RuntimeException e) {
             // disable console logging, so user does not see the exception twice (jvm will show it already)
             final Logger rootLogger = ESLoggerFactory.getRootLogger();
-            final Appender maybeConsoleAppender = ServerLoggers.findAppender(rootLogger, ConsoleAppender.class);
+            final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class);
             if (foreground && maybeConsoleAppender != null) {
-                ServerLoggers.removeAppender(rootLogger, maybeConsoleAppender);
+                Loggers.removeAppender(rootLogger, maybeConsoleAppender);
             }
             Logger logger = Loggers.getLogger(Bootstrap.class);
             if (INSTANCE.node != null) {

@@ -369,7 +369,7 @@ final class Bootstrap {
             }
             // re-enable it if appropriate, so they can see any logging during the shutdown process
             if (foreground && maybeConsoleAppender != null) {
-                ServerLoggers.addAppender(rootLogger, maybeConsoleAppender);
+                Loggers.addAppender(rootLogger, maybeConsoleAppender);
             }

             throw e;
@@ -18,11 +18,7 @@
  */
 package org.elasticsearch.common;

-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
-import org.elasticsearch.common.xcontent.XContentParser;

 import java.util.Collections;
 import java.util.HashSet;
@@ -23,7 +23,7 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.node.Node;

@@ -34,7 +34,7 @@ public abstract class AbstractComponent {
     protected final Settings settings;

     public AbstractComponent(Settings settings) {
-        this.logger = ServerLoggers.getLogger(getClass(), settings);
+        this.logger = Loggers.getLogger(getClass(), settings);
         this.deprecationLogger = new DeprecationLogger(logger);
         this.settings = settings;
     }
@@ -177,15 +177,15 @@ public class LogConfigurator {
      * @param settings the settings from which logger levels will be extracted
      */
     private static void configureLoggerLevels(final Settings settings) {
-        if (ServerLoggers.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) {
-            final Level level = ServerLoggers.LOG_DEFAULT_LEVEL_SETTING.get(settings);
-            ServerLoggers.setLevel(ESLoggerFactory.getRootLogger(), level);
+        if (Loggers.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) {
+            final Level level = Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings);
+            Loggers.setLevel(ESLoggerFactory.getRootLogger(), level);
         }
-        ServerLoggers.LOG_LEVEL_SETTING.getAllConcreteSettings(settings)
+        Loggers.LOG_LEVEL_SETTING.getAllConcreteSettings(settings)
             // do not set a log level for a logger named level (from the default log setting)
-            .filter(s -> s.getKey().equals(ServerLoggers.LOG_DEFAULT_LEVEL_SETTING.getKey()) == false).forEach(s -> {
+            .filter(s -> s.getKey().equals(Loggers.LOG_DEFAULT_LEVEL_SETTING.getKey()) == false).forEach(s -> {
                 final Level level = s.get(settings);
-                ServerLoggers.setLevel(ESLoggerFactory.getLogger(s.getKey().substring("logger.".length())), level);
+                Loggers.setLevel(ESLoggerFactory.getLogger(s.getKey().substring("logger.".length())), level);
             });
     }

@@ -43,7 +43,9 @@ import static org.elasticsearch.common.util.CollectionUtils.asArrayList;
 /**
  * A set of utilities around Logging.
  */
-public class ServerLoggers {
+public class Loggers {

+    public static final String SPACE = " ";
+
     public static final Setting<Level> LOG_DEFAULT_LEVEL_SETTING =
         new Setting<>("logger.level", Level.INFO.name(), Level::valueOf, Setting.Property.NodeScope);

@@ -89,6 +91,48 @@ public class ServerLoggers {
         return prefixesList;
     }

+    public static Logger getLogger(Logger parentLogger, String s) {
+        assert parentLogger instanceof PrefixLogger;
+        return ESLoggerFactory.getLogger(((PrefixLogger)parentLogger).prefix(), parentLogger.getName() + s);
+    }
+
+    public static Logger getLogger(String s) {
+        return ESLoggerFactory.getLogger(s);
+    }
+
+    public static Logger getLogger(Class<?> clazz) {
+        return ESLoggerFactory.getLogger(clazz);
+    }
+
+    public static Logger getLogger(Class<?> clazz, String... prefixes) {
+        return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz);
+    }
+
+    public static Logger getLogger(String name, String... prefixes) {
+        return ESLoggerFactory.getLogger(formatPrefix(prefixes), name);
+    }
+
+    private static String formatPrefix(String... prefixes) {
+        String prefix = null;
+        if (prefixes != null && prefixes.length > 0) {
+            StringBuilder sb = new StringBuilder();
+            for (String prefixX : prefixes) {
+                if (prefixX != null) {
+                    if (prefixX.equals(SPACE)) {
+                        sb.append(" ");
+                    } else {
+                        sb.append("[").append(prefixX).append("]");
+                    }
+                }
+            }
+            if (sb.length() > 0) {
+                sb.append(" ");
+                prefix = sb.toString();
+            }
+        }
+        return prefix;
+    }
+
     /**
      * Set the level of the logger. If the new level is null, the logger will inherit it's level from its nearest ancestor with a non-null
      * level.
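A quick hypothetical illustration of the restored getLogger overloads (they are not new code, just merged back from the removed core-jar copy): prefixes handed to Loggers.getLogger are wrapped in brackets by formatPrefix and attached to every message through the PrefixLogger in the next hunk, provided the logging layout pattern includes the marker.

    import org.apache.logging.log4j.Logger;
    import org.elasticsearch.common.logging.Loggers;

    // Hypothetical usage after the ServerLoggers -> Loggers merge.
    public class PrefixedLoggingSketch {
        public static void main(String[] args) {
            // Messages from this logger carry a "[my-index] " prefix.
            Logger indexLogger = Loggers.getLogger(PrefixedLoggingSketch.class, "my-index");
            indexLogger.info("prefixed message");

            // Plain, unprefixed logger looked up by name.
            Logger plainLogger = Loggers.getLogger("org.elasticsearch.example");
            plainLogger.info("unprefixed message");
        }
    }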
@@ -32,7 +32,7 @@ import java.util.WeakHashMap;
 * A logger that prefixes all messages with a fixed prefix specified during construction. The prefix mechanism uses the marker construct, so
 * for the prefixes to appear, the logging layout pattern must include the marker in its pattern.
 */
-public class PrefixLogger extends ExtendedLoggerWrapper {
+class PrefixLogger extends ExtendedLoggerWrapper {

    /*
     * We can not use the built-in Marker tracking (MarkerManager) because the MarkerManager holds a permanent reference to the marker;
@@ -46,7 +46,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocatio
 import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Setting.Property;

@@ -111,7 +111,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
     }

     private static final class LoggingSettingUpdater implements SettingUpdater<Settings> {
-        final Predicate<String> loggerPredicate = ServerLoggers.LOG_LEVEL_SETTING::match;
+        final Predicate<String> loggerPredicate = Loggers.LOG_LEVEL_SETTING::match;
         private final Settings settings;

         LoggingSettingUpdater(Settings settings) {

@@ -129,10 +129,10 @@ public final class ClusterSettings extends AbstractScopedSettings {
                 builder.put(current.filter(loggerPredicate));
                 for (String key : previous.keySet()) {
                     if (loggerPredicate.test(key) && builder.keys().contains(key) == false) {
-                        if (ServerLoggers.LOG_LEVEL_SETTING.getConcreteSetting(key).exists(settings) == false) {
+                        if (Loggers.LOG_LEVEL_SETTING.getConcreteSetting(key).exists(settings) == false) {
                             builder.putNull(key);
                         } else {
-                            builder.put(key, ServerLoggers.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings).toString());
+                            builder.put(key, Loggers.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings).toString());
                         }
                     }
                 }

@@ -150,12 +150,12 @@ public final class ClusterSettings extends AbstractScopedSettings {
                 if ("_root".equals(component)) {
                     final String rootLevel = value.get(key);
                     if (rootLevel == null) {
-                        ServerLoggers.setLevel(ESLoggerFactory.getRootLogger(), ServerLoggers.LOG_DEFAULT_LEVEL_SETTING.get(settings));
+                        Loggers.setLevel(ESLoggerFactory.getRootLogger(), Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings));
                     } else {
-                        ServerLoggers.setLevel(ESLoggerFactory.getRootLogger(), rootLevel);
+                        Loggers.setLevel(ESLoggerFactory.getRootLogger(), rootLevel);
                     }
                 } else {
-                    ServerLoggers.setLevel(ESLoggerFactory.getLogger(component), value.get(key));
+                    Loggers.setLevel(ESLoggerFactory.getLogger(component), value.get(key));
                 }
             }
         }

@@ -380,8 +380,8 @@ public final class ClusterSettings extends AbstractScopedSettings {
                     ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING,
                     EsExecutors.PROCESSORS_SETTING,
                     ThreadContext.DEFAULT_HEADERS_SETTING,
-                    ServerLoggers.LOG_DEFAULT_LEVEL_SETTING,
-                    ServerLoggers.LOG_LEVEL_SETTING,
+                    Loggers.LOG_DEFAULT_LEVEL_SETTING,
+                    Loggers.LOG_LEVEL_SETTING,
                     NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING,
                     NodeEnvironment.ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING,
                     OsService.REFRESH_INTERVAL_SETTING,
@@ -22,7 +22,7 @@ package org.elasticsearch.common.settings;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.inject.Binder;
 import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;

@@ -57,7 +57,7 @@ public class SettingsModule implements Module {
     }

     public SettingsModule(Settings settings, List<Setting<?>> additionalSettings, List<String> settingsFilter) {
-        logger = ServerLoggers.getLogger(getClass(), settings);
+        logger = Loggers.getLogger(getClass(), settings);
         this.settings = settings;
         for (Setting<?> setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) {
             registerSetting(setting);
@@ -25,7 +25,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.service.ClusterApplier;
 import org.elasticsearch.cluster.service.MasterService;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;

@@ -109,7 +109,7 @@ public class DiscoveryModule {
         if (discoverySupplier == null) {
             throw new IllegalArgumentException("Unknown discovery type [" + discoveryType + "]");
         }
-        ServerLoggers.getLogger(getClass(), settings).info("using discovery type [{}]", discoveryType);
+        Loggers.getLogger(getClass(), settings).info("using discovery type [{}]", discoveryType);
         discovery = Objects.requireNonNull(discoverySupplier.get());
     }

@ -38,7 +38,7 @@ import org.elasticsearch.common.Randomness;
|
||||||
import org.elasticsearch.common.SuppressForbidden;
|
import org.elasticsearch.common.SuppressForbidden;
|
||||||
import org.elasticsearch.common.UUIDs;
|
import org.elasticsearch.common.UUIDs;
|
||||||
import org.elasticsearch.common.io.FileSystemUtils;
|
import org.elasticsearch.common.io.FileSystemUtils;
|
||||||
import org.elasticsearch.common.logging.ServerLoggers;
|
import org.elasticsearch.common.logging.Loggers;
|
||||||
import org.elasticsearch.common.settings.Setting;
|
import org.elasticsearch.common.settings.Setting;
|
||||||
import org.elasticsearch.common.settings.Setting.Property;
|
import org.elasticsearch.common.settings.Setting.Property;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
@ -182,7 +182,7 @@ public final class NodeEnvironment implements Closeable {
|
||||||
locks = null;
|
locks = null;
|
||||||
nodeLockId = -1;
|
nodeLockId = -1;
|
||||||
nodeMetaData = new NodeMetaData(generateNodeId(settings));
|
nodeMetaData = new NodeMetaData(generateNodeId(settings));
|
||||||
logger = ServerLoggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId()));
|
logger = Loggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId()));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
final NodePath[] nodePaths = new NodePath[environment.dataWithClusterFiles().length];
|
final NodePath[] nodePaths = new NodePath[environment.dataWithClusterFiles().length];
|
||||||
|
@ -190,7 +190,7 @@ public final class NodeEnvironment implements Closeable {
|
||||||
boolean success = false;
|
boolean success = false;
|
||||||
|
|
||||||
// trace logger to debug issues before the default node name is derived from the node id
|
// trace logger to debug issues before the default node name is derived from the node id
|
||||||
Logger startupTraceLogger = ServerLoggers.getLogger(getClass(), settings);
|
Logger startupTraceLogger = Loggers.getLogger(getClass(), settings);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
sharedDataPath = environment.sharedDataFile();
|
sharedDataPath = environment.sharedDataFile();
|
||||||
|
@ -244,7 +244,7 @@ public final class NodeEnvironment implements Closeable {
|
||||||
throw new IllegalStateException(message, lastException);
|
throw new IllegalStateException(message, lastException);
|
||||||
}
|
}
|
||||||
this.nodeMetaData = loadOrCreateNodeMetaData(settings, startupTraceLogger, nodePaths);
|
this.nodeMetaData = loadOrCreateNodeMetaData(settings, startupTraceLogger, nodePaths);
|
||||||
this.logger = ServerLoggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId()));
|
this.logger = Loggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId()));
|
||||||
|
|
||||||
this.nodeLockId = nodeLockId;
|
this.nodeLockId = nodeLockId;
|
||||||
this.locks = locks;
|
this.locks = locks;
|
||||||
|
|
|
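The pattern in these hunks, and in most of the hunks that follow, is mechanical: call sites that obtained a settings-prefixed logger through ServerLoggers now use the Loggers helper that remains in the server module. A minimal sketch of the call shape, assuming only the Loggers.getLogger(Class, Settings) overload visible in the diff; the class and method names here are illustrative, not from the commit:

// Sketch: obtaining a node-name-prefixed logger via the consolidated Loggers helper.
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

class DiscoveryTypeReporter {                       // hypothetical example class
    void report(Settings settings, String discoveryType) {
        // Same call shape as the DiscoveryModule hunk above.
        Logger logger = Loggers.getLogger(getClass(), settings);
        logger.info("using discovery type [{}]", discoveryType);
    }
}
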
@@ -21,7 +21,7 @@ package org.elasticsearch.index;

 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;

 public abstract class AbstractIndexComponent implements IndexComponent {

@@ -33,7 +33,7 @@ public abstract class AbstractIndexComponent implements IndexComponent {
      * Constructs a new index component, with the index name and its settings.
      */
     protected AbstractIndexComponent(IndexSettings indexSettings) {
-        this.logger = ServerLoggers.getLogger(getClass(), indexSettings.getSettings(), indexSettings.getIndex());
+        this.logger = Loggers.getLogger(getClass(), indexSettings.getSettings(), indexSettings.getIndex());
         this.deprecationLogger = new DeprecationLogger(logger);
         this.indexSettings = indexSettings;
     }

@@ -24,7 +24,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.shard.IndexEventListener;
 import org.elasticsearch.index.shard.IndexShard;
@@ -52,7 +52,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
             }
         }
         this.listeners = Collections.unmodifiableList(new ArrayList<>(listeners));
-        this.logger = ServerLoggers.getLogger(getClass(), indexSettings.getSettings(), indexSettings.getIndex());
+        this.logger = Loggers.getLogger(getClass(), indexSettings.getSettings(), indexSettings.getIndex());
     }

     @Override

@@ -22,7 +22,7 @@ import org.apache.logging.log4j.Logger;
 import org.apache.lucene.index.MergePolicy;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
@@ -381,7 +381,7 @@ public final class IndexSettings {
         this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build();
         this.index = indexMetaData.getIndex();
         version = Version.indexCreated(settings);
-        logger = ServerLoggers.getLogger(getClass(), settings, index);
+        logger = Loggers.getLogger(getClass(), settings, index);
         nodeName = Node.NODE_NAME_SETTING.get(settings);
         this.indexMetaData = indexMetaData;
         numberOfShards = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null);

@@ -22,7 +22,7 @@ package org.elasticsearch.index;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.unit.TimeValue;
@@ -87,7 +87,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
     }, Property.Dynamic, Property.IndexScope);

     IndexingSlowLog(IndexSettings indexSettings) {
-        this.indexLogger = ServerLoggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index", indexSettings.getSettings());
+        this.indexLogger = Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index", indexSettings.getSettings());
         this.index = indexSettings.getIndex();

         indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat);
@@ -117,7 +117,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {

     private void setLevel(SlowLogLevel level) {
         this.level = level;
-        ServerLoggers.setLevel(this.indexLogger, level.name());
+        Loggers.setLevel(this.indexLogger, level.name());
     }

     private void setWarnThreshold(TimeValue warnThreshold) {

@@ -21,7 +21,7 @@ package org.elasticsearch.index;

 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.unit.TimeValue;
@@ -81,8 +81,8 @@ public final class SearchSlowLog implements SearchOperationListener {

     public SearchSlowLog(IndexSettings indexSettings) {

-        this.queryLogger = ServerLoggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query", indexSettings.getSettings());
-        this.fetchLogger = ServerLoggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch", indexSettings.getSettings());
+        this.queryLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query", indexSettings.getSettings());
+        this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch", indexSettings.getSettings());

         indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING, this::setQueryWarnThreshold);
         this.queryWarnThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING).nanos();
@@ -108,8 +108,8 @@ public final class SearchSlowLog implements SearchOperationListener {

     private void setLevel(SlowLogLevel level) {
         this.level = level;
-        ServerLoggers.setLevel(queryLogger, level.name());
-        ServerLoggers.setLevel(fetchLogger, level.name());
+        Loggers.setLevel(queryLogger, level.name());
+        Loggers.setLevel(fetchLogger, level.name());
     }
     @Override
     public void onQueryPhase(SearchContext context, long tookInNanos) {

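The slow-log hunks also route dynamic level changes through Loggers.setLevel(Logger, String), driven by the SlowLogLevel enum name. A small sketch of that update path, assuming the Loggers.setLevel overload used above; the wrapper class is illustrative, not from the commit:

// Sketch: reconfiguring a slow-log logger's level at runtime via Loggers.setLevel.
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;

final class SlowLogLevelSwitcher {                  // hypothetical helper
    private final Logger slowLogger;

    SlowLogLevelSwitcher(Logger slowLogger) {
        this.slowLogger = slowLogger;
    }

    // Mirrors the setLevel(...) bodies above: the level name ("WARN", "TRACE", ...)
    // is handed to Loggers.setLevel, which reconfigures the underlying log4j logger.
    void apply(String levelName) {
        Loggers.setLevel(slowLogger, levelName);
    }
}
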
@@ -25,7 +25,7 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.MergeScheduler;
 import org.apache.lucene.index.OneMergeHelper;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.metrics.CounterMetric;
 import org.elasticsearch.common.metrics.MeanMetric;
 import org.elasticsearch.common.settings.Settings;
@@ -71,7 +71,7 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
         this.config = indexSettings.getMergeSchedulerConfig();
         this.shardId = shardId;
         this.indexSettings = indexSettings.getSettings();
-        this.logger = ServerLoggers.getLogger(getClass(), this.indexSettings, shardId);
+        this.logger = Loggers.getLogger(getClass(), this.indexSettings, shardId);
         refreshConfig();
     }

@@ -50,7 +50,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
@@ -130,7 +130,7 @@ public abstract class Engine implements Closeable {
         this.shardId = engineConfig.getShardId();
         this.allocationId = engineConfig.getAllocationId();
         this.store = engineConfig.getStore();
-        this.logger = ServerLoggers.getLogger(Engine.class, // we use the engine class directly here to make sure all subclasses have the same logger name
+        this.logger = Loggers.getLogger(Engine.class, // we use the engine class directly here to make sure all subclasses have the same logger name
             engineConfig.getIndexSettings().getSettings(), engineConfig.getShardId());
         this.eventListener = engineConfig.getEventListener();
     }

@@ -21,7 +21,7 @@ package org.elasticsearch.index.shard;

 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.index.IndexSettings;

 public abstract class AbstractIndexShardComponent implements IndexShardComponent {
@@ -34,7 +34,7 @@ public abstract class AbstractIndexShardComponent implements IndexShardComponent
     protected AbstractIndexShardComponent(ShardId shardId, IndexSettings indexSettings) {
         this.shardId = shardId;
         this.indexSettings = indexSettings;
-        this.logger = ServerLoggers.getLogger(getClass(), this.indexSettings.getSettings(), shardId);
+        this.logger = Loggers.getLogger(getClass(), this.indexSettings.getSettings(), shardId);
         this.deprecationLogger = new DeprecationLogger(logger);
     }

@@ -58,7 +58,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.store.ByteArrayIndexInput;
 import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
@@ -159,7 +159,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
     public Store(ShardId shardId, IndexSettings indexSettings, DirectoryService directoryService, ShardLock shardLock, OnClose onClose) throws IOException {
         super(shardId, indexSettings);
         final Settings settings = indexSettings.getSettings();
-        this.directory = new StoreDirectory(directoryService.newDirectory(), ServerLoggers.getLogger("index.store.deletes", settings, shardId));
+        this.directory = new StoreDirectory(directoryService.newDirectory(), Loggers.getLogger("index.store.deletes", settings, shardId));
         this.shardLock = shardLock;
         this.onClose = onClose;
         final TimeValue refreshInterval = indexSettings.getValue(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING);

@@ -40,7 +40,7 @@ import org.elasticsearch.common.StopWatch;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
@@ -120,7 +120,7 @@ public class RecoverySourceHandler {
         this.recoveryTarget = recoveryTarget;
         this.request = request;
         this.shardId = this.request.shardId().id();
-        this.logger = ServerLoggers.getLogger(getClass(), nodeSettings, request.shardId(), "recover to " + request.targetNode().getName());
+        this.logger = Loggers.getLogger(getClass(), nodeSettings, request.shardId(), "recover to " + request.targetNode().getName());
         this.chunkSizeInBytes = fileChunkSizeInBytes;
         this.response = new RecoveryResponse();
     }

@@ -34,7 +34,7 @@ import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.util.CancellableThreads;
 import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
@@ -117,7 +117,7 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget
         this.cancellableThreads = new CancellableThreads();
         this.recoveryId = idGenerator.incrementAndGet();
         this.listener = listener;
-        this.logger = ServerLoggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
+        this.logger = Loggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
         this.indexShard = indexShard;
         this.sourceNode = sourceNode;
         this.shardId = indexShard.shardId();

@@ -67,7 +67,6 @@ import org.elasticsearch.common.inject.util.Providers;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.network.NetworkModule;
@@ -272,7 +271,7 @@ public class Node implements Closeable {
             throw new IllegalStateException("Failed to create node environment", ex);
         }
         final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings);
-        Logger logger = ServerLoggers.getLogger(Node.class, tmpSettings);
+        Logger logger = Loggers.getLogger(Node.class, tmpSettings);
         final String nodeId = nodeEnvironment.nodeId();
         tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeId);
         // this must be captured after the node name is possibly added to the settings

@@ -36,6 +36,7 @@ import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.component.LifecycleComponent;
 import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
@@ -562,6 +563,7 @@ public class PluginsService extends AbstractComponent {
         List<String> exts = bundle.plugin.getExtendedPlugins();

         try {
+            final Logger logger = ESLoggerFactory.getLogger(JarHell.class);
             Set<URL> urls = new HashSet<>();
             for (String extendedPlugin : exts) {
                 Set<URL> pluginUrls = transitiveUrls.get(extendedPlugin);
@@ -582,11 +584,11 @@ public class PluginsService extends AbstractComponent {
                 }

                 urls.addAll(pluginUrls);
-                JarHell.checkJarHell(urls); // check jarhell as we add each extended plugin's urls
+                JarHell.checkJarHell(urls, logger::debug); // check jarhell as we add each extended plugin's urls
             }

             urls.addAll(bundle.urls);
-            JarHell.checkJarHell(urls); // check jarhell of each extended plugin against this plugin
+            JarHell.checkJarHell(urls, logger::debug); // check jarhell of each extended plugin against this plugin
             transitiveUrls.put(bundle.plugin.getName(), urls);

             Set<URL> classpath = JarHell.parseClassPath();
@@ -599,7 +601,7 @@ public class PluginsService extends AbstractComponent {
             // check we don't have conflicting classes
             Set<URL> union = new HashSet<>(classpath);
             union.addAll(bundle.urls);
-            JarHell.checkJarHell(union);
+            JarHell.checkJarHell(union, logger::debug);
         } catch (Exception e) {
             throw new IllegalStateException("failed to load plugin " + bundle.plugin.getName() + " due to jar hell", e);
         }

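With this change JarHell.checkJarHell takes its verbose output sink as an argument instead of logging through log4j itself, which is what lets elasticsearch-core drop the log4j-api dependency. A sketch of calling the new overload, assuming the checkJarHell(Set<URL>, Consumer<String>) and parseClassPath() signatures used above; the wrapper class is illustrative, not from the commit:

// Sketch: jar-hell checking where the caller supplies the output Consumer<String>.
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.logging.ESLoggerFactory;

class JarHellCheckExample {                         // hypothetical example class
    static void checkClasspath() throws Exception {
        final Logger logger = ESLoggerFactory.getLogger(JarHell.class);
        Set<URL> urls = new HashSet<>(JarHell.parseClassPath());
        // The second argument is just a Consumer<String>; es-core no longer needs a
        // log4j Logger, the caller decides where the verbose output goes.
        JarHell.checkJarHell(urls, logger::debug);
    }
}
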
@@ -26,7 +26,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.MockLogAppender;

@@ -83,11 +83,11 @@ public class MaxMapCountCheckTests extends ESTestCase {
                     "I/O exception while trying to read [{}]",
                     new Object[] { procSysVmMaxMapCountPath },
                     e -> ioException == e));
-            ServerLoggers.addAppender(logger, appender);
+            Loggers.addAppender(logger, appender);
             assertThat(check.getMaxMapCount(logger), equalTo(-1L));
             appender.assertAllExpectationsMatched();
             verify(reader).close();
-            ServerLoggers.removeAppender(logger, appender);
+            Loggers.removeAppender(logger, appender);
             appender.stop();
         }

@@ -105,11 +105,11 @@ public class MaxMapCountCheckTests extends ESTestCase {
                     "unable to parse vm.max_map_count [{}]",
                     new Object[] { "eof" },
                     e -> e instanceof NumberFormatException && e.getMessage().equals("For input string: \"eof\"")));
-            ServerLoggers.addAppender(logger, appender);
+            Loggers.addAppender(logger, appender);
            assertThat(check.getMaxMapCount(logger), equalTo(-1L));
             appender.assertAllExpectationsMatched();
             verify(reader).close();
-            ServerLoggers.removeAppender(logger, appender);
+            Loggers.removeAppender(logger, appender);
             appender.stop();
         }

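The test-side churn follows one shape: attach a MockLogAppender to a logger with Loggers.addAppender, run the code under test, assert the expectations, then detach and stop the appender in a finally block. A condensed sketch of that shape, assuming the MockLogAppender and Loggers helpers shown above; the logger name and helper class are illustrative, not from the commit:

// Sketch: the attach / run / assert / detach pattern used by the tests in this diff.
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.MockLogAppender;

class LoggingAssertionSketch {                      // hypothetical test helper
    void assertLogged(Runnable codeUnderTest, MockLogAppender.LoggingExpectation expectation) throws Exception {
        Logger logger = ESLoggerFactory.getLogger("org.elasticsearch.example");   // illustrative logger name
        MockLogAppender appender = new MockLogAppender();
        appender.start();
        appender.addExpectation(expectation);
        Loggers.addAppender(logger, appender);      // attach before running the code under test
        try {
            codeUnderTest.run();
            appender.assertAllExpectationsMatched();
        } finally {
            Loggers.removeAppender(logger, appender);   // always detach, then stop
            appender.stop();
        }
    }
}
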
@@ -44,7 +44,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci
 import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
@@ -343,7 +342,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
             new MockLogAppender.UnseenEventExpectation("no completed message logged on dry run",
                 TransportClusterRerouteAction.class.getName(), Level.INFO, "allocated an empty primary*")
         );
-        ServerLoggers.addAppender(actionLogger, dryRunMockLog);
+        Loggers.addAppender(actionLogger, dryRunMockLog);

         AllocationCommand dryRunAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true);
         ClusterRerouteResponse dryRunResponse = client().admin().cluster().prepareReroute()
@@ -358,7 +357,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {

         dryRunMockLog.assertAllExpectationsMatched();
         dryRunMockLog.stop();
-        ServerLoggers.removeAppender(actionLogger, dryRunMockLog);
+        Loggers.removeAppender(actionLogger, dryRunMockLog);

         MockLogAppender allocateMockLog = new MockLogAppender();
         allocateMockLog.start();
@@ -370,7 +369,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
             new MockLogAppender.UnseenEventExpectation("no message for second allocate empty primary",
                 TransportClusterRerouteAction.class.getName(), Level.INFO, "allocated an empty primary*" + nodeName2 + "*")
         );
-        ServerLoggers.addAppender(actionLogger, allocateMockLog);
+        Loggers.addAppender(actionLogger, allocateMockLog);

         AllocationCommand yesDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true);
         AllocationCommand noDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand("noexist", 1, nodeName2, true);
@@ -386,7 +385,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {

         allocateMockLog.assertAllExpectationsMatched();
         allocateMockLog.stop();
-        ServerLoggers.removeAppender(actionLogger, allocateMockLog);
+        Loggers.removeAppender(actionLogger, allocateMockLog);
     }

     public void testClusterRerouteWithBlocks() throws Exception {

@@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -63,7 +63,7 @@ public class TemplateUpgradeServiceIT extends ESIntegTestCase {
         protected final Settings settings;

         public TestPlugin(Settings settings) {
-            this.logger = ServerLoggers.getLogger(getClass(), settings);
+            this.logger = Loggers.getLogger(getClass(), settings);
             this.settings = settings;
         }

@@ -30,7 +30,6 @@ import org.elasticsearch.cluster.NodeConnectionsService;
 import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -130,7 +129,7 @@ public class ClusterApplierServiceTests extends ESTestCase {
                 "*failed to execute cluster state applier in [2s]*"));

         Logger clusterLogger = Loggers.getLogger("org.elasticsearch.cluster.service");
-        ServerLoggers.addAppender(clusterLogger, mockAppender);
+        Loggers.addAppender(clusterLogger, mockAppender);
         try {
             final CountDownLatch latch = new CountDownLatch(3);
             clusterApplierService.currentTimeOverride = System.nanoTime();
@@ -180,7 +179,7 @@ public class ClusterApplierServiceTests extends ESTestCase {
             });
             latch.await();
         } finally {
-            ServerLoggers.removeAppender(clusterLogger, mockAppender);
+            Loggers.removeAppender(clusterLogger, mockAppender);
             mockAppender.stop();
         }
         mockAppender.assertAllExpectationsMatched();
@@ -210,7 +209,7 @@ public class ClusterApplierServiceTests extends ESTestCase {
                 "*cluster state applier task [test3] took [34s] above the warn threshold of *"));

         Logger clusterLogger = Loggers.getLogger("org.elasticsearch.cluster.service");
-        ServerLoggers.addAppender(clusterLogger, mockAppender);
+        Loggers.addAppender(clusterLogger, mockAppender);
         try {
             final CountDownLatch latch = new CountDownLatch(4);
             final CountDownLatch processedFirstTask = new CountDownLatch(1);
@@ -276,7 +275,7 @@ public class ClusterApplierServiceTests extends ESTestCase {
             });
             latch.await();
         } finally {
-            ServerLoggers.removeAppender(clusterLogger, mockAppender);
+            Loggers.removeAppender(clusterLogger, mockAppender);
             mockAppender.stop();
         }
         mockAppender.assertAllExpectationsMatched();

@@ -35,7 +35,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
@@ -232,7 +231,7 @@ public class MasterServiceTests extends ESTestCase {
                 "*processing [test3]: took [3s] done publishing updated cluster state (version: *, uuid: *)"));

         Logger clusterLogger = Loggers.getLogger(masterService.getClass().getPackage().getName());
-        ServerLoggers.addAppender(clusterLogger, mockAppender);
+        Loggers.addAppender(clusterLogger, mockAppender);
         try {
             final CountDownLatch latch = new CountDownLatch(4);
             masterService.currentTimeOverride = System.nanoTime();
@@ -307,7 +306,7 @@ public class MasterServiceTests extends ESTestCase {
             });
             latch.await();
         } finally {
-            ServerLoggers.removeAppender(clusterLogger, mockAppender);
+            Loggers.removeAppender(clusterLogger, mockAppender);
             mockAppender.stop();
         }
         mockAppender.assertAllExpectationsMatched();
@@ -579,7 +578,7 @@ public class MasterServiceTests extends ESTestCase {
                 "*cluster state update task [test4] took [34s] above the warn threshold of *"));

         Logger clusterLogger = Loggers.getLogger(masterService.getClass().getPackage().getName());
-        ServerLoggers.addAppender(clusterLogger, mockAppender);
+        Loggers.addAppender(clusterLogger, mockAppender);
         try {
             final CountDownLatch latch = new CountDownLatch(5);
             final CountDownLatch processedFirstTask = new CountDownLatch(1);
@@ -675,7 +674,7 @@ public class MasterServiceTests extends ESTestCase {
             });
             latch.await();
         } finally {
-            ServerLoggers.removeAppender(clusterLogger, mockAppender);
+            Loggers.removeAppender(clusterLogger, mockAppender);
             mockAppender.stop();
         }
         mockAppender.assertAllExpectationsMatched();

@@ -24,7 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.test.ESTestCase;
@@ -795,8 +795,8 @@ public class ScopedSettingsTests extends ESTestCase {
             settings.applySettings(Settings.builder().build());
             assertEquals(property, ESLoggerFactory.getLogger("test").getLevel());
         } finally {
-            ServerLoggers.setLevel(ESLoggerFactory.getRootLogger(), level);
-            ServerLoggers.setLevel(ESLoggerFactory.getLogger("test"), testLevel);
+            Loggers.setLevel(ESLoggerFactory.getRootLogger(), level);
+            Loggers.setLevel(ESLoggerFactory.getLogger("test"), testLevel);
         }
     }

@@ -811,7 +811,7 @@ public class ScopedSettingsTests extends ESTestCase {
             settings.applySettings(Settings.builder().build()); // here we fall back to 'logger.level' which is our default.
             assertEquals(Level.ERROR, ESLoggerFactory.getRootLogger().getLevel());
         } finally {
-            ServerLoggers.setLevel(ESLoggerFactory.getRootLogger(), level);
+            Loggers.setLevel(ESLoggerFactory.getRootLogger(), level);
         }
     }

@@ -21,7 +21,6 @@ package org.elasticsearch.common.settings;
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -134,14 +133,14 @@ public class SettingsFilterTests extends ESTestCase {
                            MockLogAppender.LoggingExpectation ... expectations) throws IllegalAccessException {
         Logger testLogger = Loggers.getLogger("org.elasticsearch.test");
         MockLogAppender appender = new MockLogAppender();
-        ServerLoggers.addAppender(testLogger, appender);
+        Loggers.addAppender(testLogger, appender);
         try {
             appender.start();
             Arrays.stream(expectations).forEach(appender::addExpectation);
             consumer.accept(testLogger);
             appender.assertAllExpectationsMatched();
         } finally {
-            ServerLoggers.removeAppender(testLogger, appender);
+            Loggers.removeAppender(testLogger, appender);
         }
     }

@@ -26,7 +26,6 @@ import org.apache.logging.log4j.core.appender.AbstractAppender;
 import org.apache.logging.log4j.core.filter.RegexFilter;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
@@ -72,8 +71,8 @@ public class MergeSchedulerSettingsTests extends ESTestCase {
         MockAppender mockAppender = new MockAppender("testUpdateAutoThrottleSettings");
         mockAppender.start();
         final Logger settingsLogger = Loggers.getLogger("org.elasticsearch.common.settings.IndexScopedSettings");
-        ServerLoggers.addAppender(settingsLogger, mockAppender);
-        ServerLoggers.setLevel(settingsLogger, Level.TRACE);
+        Loggers.addAppender(settingsLogger, mockAppender);
+        Loggers.setLevel(settingsLogger, Level.TRACE);
         try {
             Settings.Builder builder = Settings.builder()
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -92,9 +91,9 @@ public class MergeSchedulerSettingsTests extends ESTestCase {
             assertTrue(mockAppender.sawUpdateAutoThrottle);
             assertEquals(settings.getMergeSchedulerConfig().isAutoThrottle(), false);
         } finally {
-            ServerLoggers.removeAppender(settingsLogger, mockAppender);
+            Loggers.removeAppender(settingsLogger, mockAppender);
             mockAppender.stop();
-            ServerLoggers.setLevel(settingsLogger, (Level) null);
+            Loggers.setLevel(settingsLogger, (Level) null);
         }
     }

@@ -103,8 +102,8 @@ public class MergeSchedulerSettingsTests extends ESTestCase {
         MockAppender mockAppender = new MockAppender("testUpdateAutoThrottleSettings");
         mockAppender.start();
         final Logger settingsLogger = Loggers.getLogger("org.elasticsearch.common.settings.IndexScopedSettings");
-        ServerLoggers.addAppender(settingsLogger, mockAppender);
-        ServerLoggers.setLevel(settingsLogger, Level.TRACE);
+        Loggers.addAppender(settingsLogger, mockAppender);
+        Loggers.setLevel(settingsLogger, Level.TRACE);
         try {
             Settings.Builder builder = Settings.builder()
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -124,9 +123,9 @@ public class MergeSchedulerSettingsTests extends ESTestCase {
             // Make sure we log the change:
             assertTrue(mockAppender.sawUpdateMaxThreadCount);
         } finally {
-            ServerLoggers.removeAppender(settingsLogger, mockAppender);
+            Loggers.removeAppender(settingsLogger, mockAppender);
             mockAppender.stop();
-            ServerLoggers.setLevel(settingsLogger, (Level) null);
+            Loggers.setLevel(settingsLogger, (Level) null);
         }
     }

@@ -81,7 +81,6 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.lucene.uid.Versions;
@@ -1929,8 +1928,8 @@ public class InternalEngineTests extends EngineTestCase {

         Logger rootLogger = LogManager.getRootLogger();
         Level savedLevel = rootLogger.getLevel();
-        ServerLoggers.addAppender(rootLogger, mockAppender);
-        ServerLoggers.setLevel(rootLogger, Level.DEBUG);
+        Loggers.addAppender(rootLogger, mockAppender);
+        Loggers.setLevel(rootLogger, Level.DEBUG);
         rootLogger = LogManager.getRootLogger();

         try {
@@ -1941,15 +1940,15 @@ public class InternalEngineTests extends EngineTestCase {
             assertFalse(mockAppender.sawIndexWriterMessage);

             // Again, with TRACE, which should log IndexWriter output:
-            ServerLoggers.setLevel(rootLogger, Level.TRACE);
+            Loggers.setLevel(rootLogger, Level.TRACE);
             engine.index(indexForDoc(doc));
             engine.flush();
             assertTrue(mockAppender.sawIndexWriterMessage);

         } finally {
-            ServerLoggers.removeAppender(rootLogger, mockAppender);
+            Loggers.removeAppender(rootLogger, mockAppender);
             mockAppender.stop();
-            ServerLoggers.setLevel(rootLogger, savedLevel);
+            Loggers.setLevel(rootLogger, savedLevel);
         }
     }

@@ -2217,8 +2216,8 @@ public class InternalEngineTests extends EngineTestCase {

         final Logger iwIFDLogger = Loggers.getLogger("org.elasticsearch.index.engine.Engine.IFD");

-        ServerLoggers.addAppender(iwIFDLogger, mockAppender);
-        ServerLoggers.setLevel(iwIFDLogger, Level.DEBUG);
+        Loggers.addAppender(iwIFDLogger, mockAppender);
+        Loggers.setLevel(iwIFDLogger, Level.DEBUG);

         try {
             // First, with DEBUG, which should NOT log IndexWriter output:
@@ -2229,16 +2228,16 @@ public class InternalEngineTests extends EngineTestCase {
             assertFalse(mockAppender.sawIndexWriterIFDMessage);

             // Again, with TRACE, which should only log IndexWriter IFD output:
-            ServerLoggers.setLevel(iwIFDLogger, Level.TRACE);
+            Loggers.setLevel(iwIFDLogger, Level.TRACE);
             engine.index(indexForDoc(doc));
             engine.flush();
             assertFalse(mockAppender.sawIndexWriterMessage);
             assertTrue(mockAppender.sawIndexWriterIFDMessage);

         } finally {
-            ServerLoggers.removeAppender(iwIFDLogger, mockAppender);
+            Loggers.removeAppender(iwIFDLogger, mockAppender);
             mockAppender.stop();
-            ServerLoggers.setLevel(iwIFDLogger, (Level) null);
+            Loggers.setLevel(iwIFDLogger, (Level) null);
         }
     }

@@ -20,12 +20,14 @@
 package org.elasticsearch.bootstrap;

 import com.carrotsearch.randomizedtesting.RandomizedRunner;
+import org.apache.logging.log4j.Logger;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.FileSystemUtils;
 import org.elasticsearch.common.io.PathUtils;
+import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.network.IfConfig;
 import org.elasticsearch.plugins.PluginInfo;
 import org.elasticsearch.secure_sm.SecureSM;
@@ -86,7 +88,8 @@ public class BootstrapForTesting {

         // check for jar hell
         try {
-            JarHell.checkJarHell();
+            final Logger logger = ESLoggerFactory.getLogger(JarHell.class);
+            JarHell.checkJarHell(logger::debug);
         } catch (Exception e) {
             throw new RuntimeException("found jar hell in test classpath", e);
         }

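Because the output parameter is only a Consumer<String>, test bootstrap code can pass logger::debug as above, but any consumer works, which is the point of removing the log4j type from the core signature. A sketch under that assumption, using the no-URL checkJarHell(Consumer<String>) overload shown in this hunk; the helper class below is illustrative, not from the commit:

// Sketch: alternative output sinks for the jar-hell check.
import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.bootstrap.JarHell;

class JarHellOutputSinks {                          // hypothetical example class
    static List<String> checkAndCapture() throws Exception {
        List<String> output = new ArrayList<>();
        JarHell.checkJarHell(output::add);          // capture the verbose output instead of logging it
        return output;
    }

    static void checkQuietly() throws Exception {
        JarHell.checkJarHell(line -> {});           // discard the verbose output entirely
    }
}
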
@@ -21,7 +21,6 @@ package org.elasticsearch.test.junit.listeners;

 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.junit.runner.Description;
@@ -107,7 +106,7 @@ public class LoggingListener extends RunListener {
         }
         for (final Map.Entry<String, String> entry : map.entrySet()) {
             final Logger logger = resolveLogger(entry.getKey());
-            ServerLoggers.setLevel(logger, entry.getValue());
+            Loggers.setLevel(logger, entry.getValue());
         }
         return existing;
     }
@@ -146,7 +145,7 @@ public class LoggingListener extends RunListener {
     private Map<String, String> reset(final Map<String, String> map) {
         for (final Map.Entry<String, String> previousLogger : map.entrySet()) {
             final Logger logger = resolveLogger(previousLogger.getKey());
-            ServerLoggers.setLevel(logger, previousLogger.getValue());
+            Loggers.setLevel(logger, previousLogger.getValue());
         }

         return Collections.emptyMap();

@@ -21,7 +21,7 @@ package org.elasticsearch.test.store;

 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ServerLoggers;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
@@ -95,7 +95,7 @@ public class MockFSIndexStore extends IndexStore {
         if (indexShard != null) {
             Boolean remove = shardSet.remove(indexShard);
             if (remove == Boolean.TRUE) {
-                Logger logger = ServerLoggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
+                Logger logger = Loggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
                 MockFSDirectoryService.checkIndex(logger, indexShard.store(), indexShard.shardId());
             }
         }