mirror of https://github.com/apache/lucene.git
LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
parent 03363f413f
commit 3af165b32a
@@ -103,6 +103,7 @@ apply from: file('gradle/validation/owasp-dependency-check.gradle')
 apply from: file('gradle/validation/ecj-lint.gradle')
 apply from: file('gradle/validation/gradlew-scripts-tweaked.gradle')
 apply from: file('gradle/validation/missing-docs-check.gradle')
+apply from: file('gradle/validation/validate-log-calls.gradle')
 
 // Source or data regeneration tasks
 apply from: file('gradle/generation/jflex.gradle')
@@ -26,6 +26,7 @@ configure(rootProject) {
     ["ForbiddenApis", "help/forbiddenApis.txt", "How to add/apply rules for forbidden APIs."],
     ["LocalSettings", "help/localSettings.txt", "Local settings, overrides and build performance tweaks."],
     ["Git", "help/git.txt", "Git assistance and guides."],
+    ["ValidateLogCalls", "help/validateLogCalls.txt", "How to use logging calls efficiently."],
   ]
 
   helpFiles.each { section, path, sectionInfo ->
@@ -45,7 +46,7 @@ configure(rootProject) {
     println "guidelines, ant-equivalent commands etc. under help/*; or type:"
     helpFiles.each { section, path, sectionInfo ->
       println String.format(Locale.ROOT,
-          "  gradlew :help%-14s # %s", section, sectionInfo)
+          "  gradlew :help%-16s # %s", section, sectionInfo)
     }
   }
 }
@@ -0,0 +1,196 @@ (new file: gradle/validation/validate-log-calls.gradle)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.tools.ant.BuildException

import static java.util.concurrent.TimeUnit.SECONDS

// Checks logging calls to keep from using patterns that might be expensive
// either in CPU or unnecessary object creation

configure(rootProject) {

  allprojects {

    task validateLogCalls(type: ValidateLogCallsTask) {
      description "Checks that log calls are either validated or conform to efficient patterns."
      group "verification"

      doFirst {
        if (project.hasProperty('srcDir')) {
          srcDir = project.getProperty('srcDir')
        } else { // Remove this later, make it optional
          throw new BuildException(String.format(Locale.ENGLISH,
              '''Until we get all the calls cleaned up, you MUST specify -PsrcDir=relative_path, e.g.
"-PsrcDir=solr/core/src/java/org/apache/solr/core". This task will recursively check all
"*.java" files under that directory'''))
        }

        checkPlus = Boolean.valueOf(propertyOrDefault('checkPlus', 'false'))
      }
    }
  }
}

class ValidateLogCallsTask extends DefaultTask {
  @Input
  String srcDir = ""

  @Input
  boolean checkPlus

  def logLevels = ["log.trace", "log.debug", "log.info", "log.warn", "log.error", "log.fatal"]

  def found = 0;
  def violations = new TreeSet();

  def reportViolation(String msg) {
    violations.add(System.lineSeparator + msg);
    found++;
  }

  // We have a log.something line, check for patterns we're not fond of.
  def checkLogLine(File file, String line, int lineNumber, String prevLine) {
    // Strip all of the comments, things in quotes and the like.
    def level = ""
    def lev = (line =~ "log\\.(.*?)\\(")
    if (lev.find()) {
      level = lev.group(1)
    }
    def stripped =
        line.replaceFirst("//.*", " ") // remove comment to EOL. Again, fragile due to the possibility of embedded double slashes
            .replaceFirst(/.*?\(/, " ") // Get rid of "log.info("
            .replaceFirst(/\);/, " ") // get rid of the closing ");"
            .replaceFirst("/\\*.*?\\*/", " ") // replace embedded comments "/*....*/"
            .replaceAll(/".*?"/, " ") // remove anything between quotes. This is a bit fragile if there are embedded double quotes.
            .replaceAll(/timeLeft\(.*?\)/, " ") // used all over tests, it's benign
            .replaceAll(/TimeUnit\..*?\.convert\(.*?\)/, " ") // again, a pattern that's efficient


    def m = stripped =~ "\\(.*?\\)"
    def hasParens = m.find()
    def hasPlus = stripped.contains("+")
    if (hasParens == false && hasPlus == false) {
      return
    }
    // Check that previous line isn't an if statement for this log level
    if (prevLine.toLowerCase().contains("is" + level + "enabled")) {
      return
    }
    if (level.equals("error") || level.equals("fatal")) {
      if (checkPlus == false || hasPlus == false) {
        return
      }
    }
    reportViolation(String.format("Suspicious logging call File: '%s' line: '%d' log message: '%s' parent path: '%s'. Parameterize or surround with 'if (log.is*Enabled) {... stripped: '%s', level: '%s' checkPlus '%s' hasParens '%s' errorfatal '%s' hasPlus '%s'"
        , file.name
        , lineNumber, line
        , file.getParentFile().getAbsolutePath()
        , stripped //nocommit, debugging.
        , level
        , checkPlus
        , hasParens
        , level.equals("error") || level.equals("fatal")
        , hasPlus))
  }


  // Require all our logger definitions lower case "log", except a couple of special ones.
  def checkLogName(File file, String line) {
    // It's many times faster to check this way than to use a regex
if (line.contains("static ") && line.contains("getLogger") && line.contains(" log ") == false) {
|
||||
switch (file.name) {
|
||||
case "LoggerFactory.java": break
|
||||
case "SolrCore.java": // Except for two know log files with a different name.
|
||||
if (line.contains("requestLog") || line.contains("slowLog")) {
|
||||
break
|
||||
}
|
||||
default:
|
||||
reportViolation("Change the logger name to lower-case 'log' in " + file.name + " " + line)
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def checkFile(File file) {
|
||||
int state = 0 // 0 == not collecting a log line, 1 == collecting a log line, 2 == just collected the last.
|
||||
int lineNumber = 0
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
// We have to assemble line-by-line to get the full log statement. We require that all loggers (except requestLog
|
||||
// and slowLog) be exactly "log". That will be checked as well.
|
||||
String prevLine = ""
|
||||
file.eachLine { String line ->
|
||||
lineNumber++
|
||||
checkLogName(file, line)
|
||||
switch (state) {
|
||||
case 0:
|
||||
logLevels.each {
|
||||
if (line.contains(it)) {
|
||||
if (line.contains(");")) {
|
||||
state = 2
|
||||
} else {
|
||||
state = 1
|
||||
}
|
||||
sb.setLength(0)
|
||||
sb.append(line)
|
||||
}
|
||||
}
|
||||
break
|
||||
|
||||
case 1: // collecting
|
||||
if (line.contains(");")) {
|
||||
state = 2
|
||||
}
|
||||
sb.append(line)
|
||||
break
|
||||
|
||||
default:
|
||||
reportViolation("Bad state, should be 0-1 in the switch statement") // make people aware the code sucks
|
||||
break
|
||||
}
|
||||
switch (state) { // It's just easier to do this here rather than read another line in the switch above.
|
||||
case 0:
|
||||
prevLine = line;
|
||||
break;
|
||||
case 1:
|
||||
break;
|
||||
case 2:
|
||||
checkLogLine(file, sb.toString(), lineNumber, prevLine)
|
||||
state = 0
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@TaskAction
|
||||
def checkLogLines() {
|
||||
project.sourceSets.each { srcSet ->
|
||||
srcSet.java.each { f ->
|
||||
if (srcDir != null && f.getCanonicalPath().contains(srcDir)) {
|
||||
checkFile(f)
|
||||
}
|
||||
}
|
||||
}
|
||||
if (found) {
|
||||
throw new BuildException(String.format(Locale.ENGLISH, 'Found %d violations in source files (%s).',
|
||||
found, violations.join(', ')));
|
||||
}
|
||||
}
|
||||
}
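To make the heuristics above concrete: checkLogLine flags a log line whose argument portion still contains a '+' or a '(...)' method call, unless the immediately preceding line is the matching is*Enabled guard (error/fatal are skipped unless checkPlus is set). A minimal sketch, assuming an slf4j logger named log and hypothetical searcher/coreName variables:

    log.debug("numDocs: " + searcher.numDocs());     // flagged: '+' concatenates on every call
    log.debug("numDocs: {}", searcher.numDocs());    // still flagged: numDocs() runs whatever the level
    if (log.isDebugEnabled()) {
      log.debug("numDocs: {}", searcher.numDocs());  // passes: previous line is the matching guard
    }
    log.debug("closing {}", coreName);               // passes: bare variable, no '+', no call

Per the BuildException message above, an invocation would look something like gradlew validateLogCalls -PsrcDir=solr/core/src/java/org/apache/solr/core, with -PcheckPlus=true to also check error/fatal calls for '+'.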
@@ -0,0 +1,50 @@ (new file: help/validateLogCalls.txt)
Using log calls
===============

There are several hidden gotchas when using logging. The main
problem is that certain patterns are evaluated and objects
constructed even if the logging level is more restrictive
than this call. If the logging level is WARN, then the following
log.info patterns are evaluated and never-used objects are
created even though the message is never logged.

This can be very expensive as per measurements with Java Flight
Recorder. It's particularly egregious when complex operations
are performed for, say, log.debug or trace calls, which
are rarely actually used.

- log.info("some stuff " + "some other stuff") will concatenate the
  strings and create an object

- log.info("some stuff {}", object.method()) will execute the
  method.

- log.info("some stuff {}", object.toString()) will execute
  the toString and then throw the results away.

-----------
NOTES:

- If you're creating a logging call that has no choice but to
  do something expensive, surround it with "if (log.is*Enabled)".

- Logging calls can take a bare exception, but do not
  use the braces if you want the stack trace! So
  log.info("stuff {}", exception) will not print the full stack,
  log.info("stuff ", exception) _will_ print the full stack.
  log.info("stuff {} ", some_object, exception) will print the full stack.

- When slf4j supports lambdas in logging calls (log4j2 does now),
  we can use lambdas rather than "if log.is*Enabled"; slf4j 2.0
  will support them when it is released.

- error and fatal level messages are NOT checked. However, if you want to
  check these levels for including '+', specify '-PcheckPlus=true'. This is
  more of a style check than a functional one.

For a fuller discussion, see LUCENE-7788 and the other JIRAs linked
from there.

Until we get all the calls cleaned up, you MUST specify -PsrcDir=relative_path,
e.g. '-PsrcDir=solr/core/src/java/org/apache/solr/core'. This task will
recursively check all '*.java' files under that directory.
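The exception note above is subtle enough to deserve a runnable illustration. A minimal sketch of the slf4j behaviour it describes (class name hypothetical; assumes slf4j and a logging binding on the classpath):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class StackTraceDemo {
      private static final Logger log = LoggerFactory.getLogger(StackTraceDemo.class);

      public static void main(String[] args) {
        Exception e = new RuntimeException("boom");
        log.info("stuff {}", e);         // '{}' consumes e: prints only e.toString(), no stack trace
        log.info("stuff ", e);           // no placeholder, trailing throwable: full stack trace
        log.info("stuff {} ", "ctx", e); // '{}' consumes "ctx"; e is left over: full stack trace
      }
    }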
@@ -156,6 +156,8 @@ Optimizations
 * LUCENE-9286: FST.Arc.BitTable reads directly FST bytes. Arc is lightweight again and FSTEnum traversal faster.
   (Bruno Roustant)
 
+* LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects (Erick Erickson)
+
 Bug Fixes
 ---------------------
 * LUCENE-9259: Fix wrong NGramFilterFactory argument name for preserveOriginal option (Paul Pazderski)
@@ -101,6 +101,8 @@ Optimizations
 
 * SOLR-14376: Optimize filter queries that match all docs. (David Smiley)
 
+* LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects (Erick Erickson)
+
 Bug Fixes
 ---------------------
 * SOLR-13264: IndexSizeTrigger aboveOp / belowOp properties not in valid properties.
@@ -161,7 +161,7 @@ public class AnnotatedApi extends Api implements PermissionNameProvider {
 
     List<Map> errs = CommandOperation.captureErrors(cmds);
     if (!errs.isEmpty()) {
-      log.error(ERR + Utils.toJSONString(errs));
+      log.error("{}{}", ERR, Utils.toJSONString(errs));
       throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, ERR, errs);
     }
 
@@ -70,7 +70,7 @@ public class ApiBag {
     try {
       validateAndRegister(api, nameSubstitutes);
     } catch (Exception e) {
-      log.error("Unable to register plugin:" + api.getClass().getName() + "with spec :" + Utils.toJSONString(api.getSpec()), e);
+      log.error("Unable to register plugin: {} with spec {} :", api.getClass().getName(), Utils.toJSONString(api.getSpec()), e);
       if (e instanceof RuntimeException) {
         throw (RuntimeException) e;
       } else {
@@ -132,7 +132,7 @@ public class V2HttpCall extends HttpSolrCall {
       core = cores.getCore(origCorename);
     }
     if (core == null) {
-      log.error(">> path: '" + path + "'");
+      log.error(">> path: '{}'", path);
       if (path.endsWith(CommonParams.INTROSPECT)) {
         initAdminRequest(path);
         return;
@@ -255,7 +255,9 @@ public class BlobRepository {
           replica = r;
           break;
         } else {
-          log.info("replica {} says it is active but not a member of live nodes", r.get(ZkStateReader.NODE_NAME_PROP));
+          if (log.isInfoEnabled()) {
+            log.info("replica {} says it is active but not a member of live nodes", r.get(ZkStateReader.NODE_NAME_PROP));
+          }
         }
       }
     }
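This hunk illustrates why a guard can pay off even around an already-parameterized call: slf4j defers building the message string, but the argument expression itself (here r.get(...)) is still evaluated eagerly to fill the varargs array. A minimal self-contained sketch (class and method names hypothetical; assumes slf4j on the classpath):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class EagerArgDemo {
      private static final Logger log = LoggerFactory.getLogger(EagerArgDemo.class);

      static String expensive() {
        System.out.println("expensive() ran"); // runs even when INFO is disabled
        return "value";
      }

      public static void main(String[] args) {
        log.info("got {}", expensive());   // expensive() always runs
        if (log.isInfoEnabled()) {
          log.info("got {}", expensive()); // expensive() runs only when INFO is enabled
        }
      }
    }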
@@ -157,12 +157,16 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
   @Override
   public void close() throws IOException {
     synchronized (this) {
-      log.debug("Closing {} - {} directories currently being tracked", this.getClass().getSimpleName(), byDirectoryCache.size());
+      if (log.isDebugEnabled()) {
+        log.debug("Closing {} - {} directories currently being tracked", this.getClass().getSimpleName(), byDirectoryCache.size());
+      }
       this.closed = true;
       Collection<CacheValue> values = byDirectoryCache.values();
       for (CacheValue val : values) {
-        log.debug("Closing {} - currently tracking: {}",
-            this.getClass().getSimpleName(), val);
+
+        if (log.isDebugEnabled()) {
+          log.debug("Closing {} - currently tracking: {}", this.getClass().getSimpleName(), val);
+        }
         try {
           // if there are still refs out, we have to wait for them
           assert val.refCnt > -1 : val.refCnt;
@@ -190,7 +194,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
       try {
         for (CacheValue v : val.closeEntries) {
           assert v.refCnt == 0 : val.refCnt;
-          log.debug("Closing directory when closing factory: " + v.path);
+          log.debug("Closing directory when closing factory: {}", v.path);
           boolean cl = closeCacheValue(v);
           if (cl) {
             closedDirs.add(v);
@@ -202,7 +206,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
       }
 
       for (CacheValue val : removeEntries) {
-        log.debug("Removing directory after core close: " + val.path);
+        log.debug("Removing directory after core close: {}", val.path);
         try {
           removeDirectory(val);
         } catch (Exception e) {
@@ -225,7 +229,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
   // be sure this is called with the this sync lock
   // returns true if we closed the cacheValue, false if it will be closed later
   private boolean closeCacheValue(CacheValue cacheValue) {
-    log.debug("looking to close {} {}", cacheValue.path, cacheValue.closeEntries.toString());
+    log.debug("looking to close {} {}", cacheValue.path, cacheValue.closeEntries);
     List<CloseListener> listeners = closeListeners.remove(cacheValue.directory);
     if (listeners != null) {
       for (CloseListener listener : listeners) {
@@ -269,7 +273,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
 
     for (CacheValue val : cacheValue.removeEntries) {
       if (!val.deleteAfterCoreClose) {
-        log.debug("Removing directory before core close: " + val.path);
+        log.debug("Removing directory before core close: {}", val.path);
         try {
           removeDirectory(val);
         } catch (Exception e) {
@@ -293,13 +297,15 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
   }
 
   private void close(CacheValue val) {
-    log.debug("Closing directory, CoreContainer#isShutdown={}", coreContainer != null ? coreContainer.isShutDown() : "null");
+    if (log.isDebugEnabled()) {
+      log.debug("Closing directory, CoreContainer#isShutdown={}", coreContainer != null ? coreContainer.isShutDown() : "null");
+    }
     try {
       if (coreContainer != null && coreContainer.isShutDown() && val.directory instanceof ShutdownAwareDirectory) {
-        log.debug("Closing directory on shutdown: " + val.path);
+        log.debug("Closing directory on shutdown: {}", val.path);
         ((ShutdownAwareDirectory) val.directory).closeOnShutdown();
       } else {
-        log.debug("Closing directory: " + val.path);
+        log.debug("Closing directory: {}", val.path);
         val.directory.close();
       }
       assert ObjectReleaseTracker.release(val.directory);
@@ -402,7 +408,7 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
       dataHomePath = Paths.get((String) args.get(SolrXmlConfig.SOLR_DATA_HOME));
     }
     if (dataHomePath != null) {
-      log.info(SolrXmlConfig.SOLR_DATA_HOME + "=" + dataHomePath);
+      log.info("{} = {}", SolrXmlConfig.SOLR_DATA_HOME, dataHomePath);
     }
   }
 
@@ -427,7 +433,9 @@ public abstract class CachingDirectoryFactory extends DirectoryFactory {
       throw new IllegalArgumentException("Unknown directory: " + directory
           + " " + byDirectoryCache);
     }
-    log.debug("Releasing directory: " + cacheValue.path + " " + (cacheValue.refCnt - 1) + " " + cacheValue.doneWithDir);
+    if (log.isDebugEnabled()) {
+      log.debug("Releasing directory: {} {} {}", cacheValue.path, (cacheValue.refCnt - 1), cacheValue.doneWithDir);
+    }
 
     cacheValue.refCnt--;
 
@@ -57,7 +57,9 @@ public class ConfigSetProperties {
     try {
       reader = new InputStreamReader(loader.openResource(name), StandardCharsets.UTF_8);
     } catch (SolrResourceNotFoundException ex) {
-      log.debug("Did not find ConfigSet properties, assuming default properties: {}", ex.getMessage());
+      if (log.isDebugEnabled()) {
+        log.debug("Did not find ConfigSet properties, assuming default properties: {}", ex.getMessage());
+      }
       return null;
     } catch (Exception ex) {
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to load reader for ConfigSet properties: " + name, ex);
@@ -231,7 +231,7 @@ public abstract class ConfigSetService {
     } catch (FileNotFoundException e) {
       return null; // acceptable
     } catch (IOException e) {
-      log.warn("Unexpected exception when getting modification time of " + schemaFile, e);
+      log.warn("Unexpected exception when getting modification time of {}", schemaFile, e);
       return null; // debatable; we'll see an error soon if there's a real problem
     }
   }
@@ -274,7 +274,9 @@ public class CoreContainer {
   }
 
   {
-    log.debug("New CoreContainer " + System.identityHashCode(this));
+    if (log.isDebugEnabled()) {
+      log.debug("New CoreContainer {}", System.identityHashCode(this));
+    }
   }
 
   /**
@@ -346,7 +348,7 @@ public class CoreContainer {
       log.debug("Authorization config not modified");
       return;
     }
-    log.info("Initializing authorization plugin: " + klas);
+    log.info("Initializing authorization plugin: {}", klas);
     authorizationPlugin = new SecurityPluginHolder<>(newVersion,
         getResourceLoader().newInstance(klas, AuthorizationPlugin.class));
 
@@ -380,7 +382,7 @@ public class CoreContainer {
       log.debug("Auditlogger config not modified");
       return;
     }
-    log.info("Initializing auditlogger plugin: " + klas);
+    log.info("Initializing auditlogger plugin: {}", klas);
     newAuditloggerPlugin = new SecurityPluginHolder<>(newVersion,
         getResourceLoader().newInstance(klas, AuditLoggerPlugin.class));
 
@@ -413,11 +415,11 @@ public class CoreContainer {
     }
 
     if (pluginClassName != null) {
-      log.debug("Authentication plugin class obtained from security.json: " + pluginClassName);
+      log.debug("Authentication plugin class obtained from security.json: {}", pluginClassName);
     } else if (System.getProperty(AUTHENTICATION_PLUGIN_PROP) != null) {
       pluginClassName = System.getProperty(AUTHENTICATION_PLUGIN_PROP);
-      log.debug("Authentication plugin class obtained from system property '" +
-          AUTHENTICATION_PLUGIN_PROP + "': " + pluginClassName);
+      log.debug("Authentication plugin class obtained from system property '{}': {}"
+          , AUTHENTICATION_PLUGIN_PROP, pluginClassName);
     } else {
       log.debug("No authentication plugin used.");
     }
@@ -431,7 +433,7 @@ public class CoreContainer {
 
     // Initialize the plugin
     if (pluginClassName != null) {
-      log.info("Initializing authentication plugin: " + pluginClassName);
+      log.info("Initializing authentication plugin: {}", pluginClassName);
       authenticationPlugin = new SecurityPluginHolder<>(newVersion,
           getResourceLoader().newInstance(pluginClassName,
               AuthenticationPlugin.class,
@@ -582,7 +584,9 @@ public class CoreContainer {
   * Load the cores defined for this CoreContainer
   */
  public void load() {
-    log.debug("Loading cores into CoreContainer [instanceDir={}]", getSolrHome());
+    if (log.isDebugEnabled()) {
+      log.debug("Loading cores into CoreContainer [instanceDir={}]", getSolrHome());
+    }
 
    // Always add $SOLR_HOME/lib to the shared resource loader
    Set<String> libDirs = new LinkedHashSet<>();
@@ -890,16 +894,17 @@ public class CoreContainer {
 
   private void warnUsersOfInsecureSettings() {
     if (authenticationPlugin == null || authorizationPlugin == null) {
-      log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as " +
-          "you make it. Consider configuring authentication/authorization before exposing Solr to users internal or " +
-          "external. See https://s.apache.org/solrsecurity for more info",
-          (authenticationPlugin != null) ? "enabled" : "disabled",
-          (authorizationPlugin != null) ? "enabled" : "disabled");
+      if (log.isWarnEnabled()) {
+        log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as " +
+            "you make it. Consider configuring authentication/authorization before exposing Solr to users internal or " +
+            "external. See https://s.apache.org/solrsecurity for more info",
+            (authenticationPlugin != null) ? "enabled" : "disabled",
+            (authorizationPlugin != null) ? "enabled" : "disabled");
+      }
     }
 
     if (authenticationPlugin !=null && StringUtils.isNotEmpty(System.getProperty("solr.jetty.https.port"))) {
-      log.warn("Solr authentication is enabled, but SSL is off. Consider enabling SSL to protect user credentials and " +
-          "data with encryption.");
+      log.warn("Solr authentication is enabled, but SSL is off. Consider enabling SSL to protect user credentials and data with encryption.");
     }
   }
 
@@ -928,7 +933,9 @@ public class CoreContainer {
       OverseerTaskQueue overseerCollectionQueue = zkController.getOverseerCollectionQueue();
       overseerCollectionQueue.allowOverseerPendingTasksToComplete();
     }
-    log.info("Shutting down CoreContainer instance=" + System.identityHashCode(this));
+    if (log.isInfoEnabled()) {
+      log.info("Shutting down CoreContainer instance={}", System.identityHashCode(this));
+    }
 
     ExecutorUtil.shutdownAndAwaitTermination(coreContainerAsyncTaskExecutor);
     ExecutorService customThreadPool = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("closeThreadPool"));
@@ -1111,13 +1118,17 @@ public class CoreContainer {
     coreInitFailures.remove(cd.getName());
 
     if (old == null || old == core) {
-      log.debug("registering core: " + cd.getName());
+      if (log.isDebugEnabled()) {
+        log.debug("registering core: {}", cd.getName());
+      }
       if (registerInZk) {
         zkSys.registerInZk(core, false, skipRecovery);
       }
       return null;
     } else {
-      log.debug("replacing core: " + cd.getName());
+      if (log.isDebugEnabled()) {
+        log.debug("replacing core: {}", cd.getName());
+      }
       old.close();
       if (registerInZk) {
         zkSys.registerInZk(core, false, skipRecovery);
@@ -1263,7 +1274,9 @@ public class CoreContainer {
 
       ConfigSet coreConfig = coreConfigService.loadConfigSet(dcore);
       dcore.setConfigSetTrusted(coreConfig.isTrusted());
-      log.info("Creating SolrCore '{}' using configuration from {}, trusted={}", dcore.getName(), coreConfig.getName(), dcore.isConfigSetTrusted());
+      if (log.isInfoEnabled()) {
+        log.info("Creating SolrCore '{}' using configuration from {}, trusted={}", dcore.getName(), coreConfig.getName(), dcore.isConfigSetTrusted());
+      }
       try {
         core = new SolrCore(this, dcore, coreConfig);
       } catch (SolrException e) {
@@ -1365,8 +1378,10 @@ public class CoreContainer {
       case none:
         throw original;
       default:
-        log.warn("Failed to create core, and did not recognize specified 'CoreInitFailedAction': [{}]. Valid options are {}.",
-            action, Arrays.asList(CoreInitFailedAction.values()));
+        if (log.isWarnEnabled()) {
+          log.warn("Failed to create core, and did not recognize specified 'CoreInitFailedAction': [{}]. Valid options are {}.",
+              action, Arrays.asList(CoreInitFailedAction.values()));
+        }
         throw original;
     }
   }
@@ -1516,7 +1531,9 @@ public class CoreContainer {
     try {
       solrCores.waitAddPendingCoreOps(cd.getName());
       ConfigSet coreConfig = coreConfigService.loadConfigSet(cd);
-      log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName());
+      if (log.isInfoEnabled()) {
+        log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName());
+      }
       newCore = core.reload(coreConfig);
 
       DocCollection docCollection = null;
@@ -1600,7 +1617,7 @@ public class CoreContainer {
 
     coresLocator.swap(this, solrCores.getCoreDescriptor(n0), solrCores.getCoreDescriptor(n1));
 
-    log.info("swapped: " + n0 + " with " + n1);
+    log.info("swapped: {} with {}", n0, n1);
   }
 
   /**
@@ -1963,9 +1980,11 @@ public class CoreContainer {
       DocCollection coll = getZkController().getZkStateReader().getClusterState().getCollection(cd.getCollectionName());
       for (Replica rep : coll.getReplicas()) {
         if (coreName.equals(rep.getCoreName())) {
-          log.warn("Core properties file for node {} found with no coreNodeName, attempting to repair with value {}. See SOLR-11503. " +
-              "This message should only appear if upgrading from collections created Solr 6.6.1 through 7.1.",
-              rep.getCoreName(), rep.getName());
+          if (log.isWarnEnabled()) {
+            log.warn("Core properties file for node {} found with no coreNodeName, attempting to repair with value {}. See SOLR-11503. " +
+                "This message should only appear if upgrading from collections created Solr 6.6.1 through 7.1.",
+                rep.getCoreName(), rep.getName());
+          }
           cd.getCloudDescriptor().setCoreNodeName(rep.getName());
           coresLocator.persist(this, cd);
           return true;
@@ -109,7 +109,9 @@ public class CorePropertiesLocator implements CoresLocator {
       try {
         Files.deleteIfExists(propfile);
       } catch (IOException e) {
-        log.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
+        if (log.isWarnEnabled()) {
+          log.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
+        }
       }
     }
   }
@@ -144,7 +146,9 @@ public class CorePropertiesLocator implements CoresLocator {
           if (file.getFileName().toString().equals(PROPERTIES_FILENAME)) {
             CoreDescriptor cd = buildCoreDescriptor(file, cc);
             if (cd != null) {
-              log.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
+              if (log.isDebugEnabled()) {
+                log.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
+              }
               cds.add(cd);
             }
             return FileVisitResult.SKIP_SIBLINGS;
@@ -167,9 +171,13 @@ public class CorePropertiesLocator implements CoresLocator {
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Couldn't walk file tree under " + this.rootDirectory, e);
     }
-    log.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
+    if (log.isInfoEnabled()) {
+      log.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
+    }
     if (cds.size() > 0) {
-      log.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
+      if (log.isInfoEnabled()) {
+        log.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
+      }
     }
     return cds;
   }
@@ -162,13 +162,14 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     if (this.hdfsDataDir != null && this.hdfsDataDir.length() == 0) {
       this.hdfsDataDir = null;
     } else {
-      log.info(HDFS_HOME + "=" + this.hdfsDataDir);
+      log.info("{}={}", HDFS_HOME, this.hdfsDataDir);
     }
     cacheMerges = getConfig(CACHE_MERGES, false);
     cacheReadOnce = getConfig(CACHE_READONCE, false);
     boolean kerberosEnabled = getConfig(KERBEROS_ENABLED, false);
-    log.info("Solr Kerberos Authentication "
-        + (kerberosEnabled ? "enabled" : "disabled"));
+    if (log.isInfoEnabled()) {
+      log.info("Solr Kerberos Authentication {}", (kerberosEnabled ? "enabled" : "disabled"));
+    }
     if (kerberosEnabled) {
       initKerberos();
     }
@@ -178,7 +179,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
   protected LockFactory createLockFactory(String rawLockType) throws IOException {
     if (null == rawLockType) {
       rawLockType = DirectoryFactory.LOCK_TYPE_HDFS;
-      log.warn("No lockType configured, assuming '"+rawLockType+"'.");
+      log.warn("No lockType configured, assuming '{}'.", rawLockType);
     }
     final String lockType = rawLockType.toLowerCase(Locale.ROOT).trim();
     switch (lockType) {
@@ -225,10 +226,12 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     log.info(
         "Number of slabs of block cache [{}] with direct memory allocation set to [{}]",
         bankCount, directAllocation);
-    log.info(
-        "Block cache target memory usage, slab size of [{}] will allocate [{}] slabs and use ~[{}] bytes",
-        new Object[] {slabSize, bankCount,
-            ((long) bankCount * (long) slabSize)});
+    if (log.isInfoEnabled()) {
+      log.info(
+          "Block cache target memory usage, slab size of [{}] will allocate [{}] slabs and use ~[{}] bytes",
+          new Object[]{slabSize, bankCount,
+              ((long) bankCount * (long) slabSize)});
+    }
 
     int bsBufferSize = params.getInt("solr.hdfs.blockcache.bufferstore.buffersize", blockSize);
     int bsBufferCount = params.getInt("solr.hdfs.blockcache.bufferstore.buffercount", 0); // this is actually total size
@@ -535,7 +538,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     try {
       pathExists = fileSystem.exists(dataDirPath);
     } catch (IOException e) {
-      log.error("Error checking if hdfs path "+dataDir+" exists", e);
+      log.error("Error checking if hdfs path exists {}", dataDir, e);
     }
     if (!pathExists) {
       log.warn("{} does not point to a valid data directory; skipping clean-up of old index directories.", dataDir);
@@ -128,7 +128,9 @@ public class PluginBag<T> implements AutoCloseable {
 
   public PluginHolder<T> createPlugin(PluginInfo info) {
     if ("true".equals(String.valueOf(info.attributes.get("runtimeLib")))) {
-      log.debug(" {} : '{}' created with runtimeLib=true ", meta.getCleanTag(), info.name);
+      if (log.isDebugEnabled()) {
+        log.debug(" {} : '{}' created with runtimeLib=true ", meta.getCleanTag(), info.name);
+      }
       LazyPluginHolder<T> holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ?
           core.getMemClassLoader() :
           core.getResourceLoader(), true);
@@ -137,7 +139,9 @@ public class PluginBag<T> implements AutoCloseable {
           (PluginHolder<T>) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder(holder) :
           holder;
     } else if ("lazy".equals(info.attributes.get("startup")) && meta.options.contains(SolrConfig.PluginOpts.LAZY)) {
-      log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name);
+      if (log.isDebugEnabled()) {
+        log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name);
+      }
       return new LazyPluginHolder<T>(meta, info, core, core.getResourceLoader(), false);
     } else {
       if (info.pkgName != null) {
@@ -254,7 +258,11 @@ public class PluginBag<T> implements AutoCloseable {
 
   void setDefault(String def) {
     if (!registry.containsKey(def)) return;
-    if (this.def != null) log.warn("Multiple defaults for : " + meta.getCleanTag());
+    if (this.def != null) {
+      if (log.isWarnEnabled()) {
+        log.warn("Multiple defaults for : {}", meta.getCleanTag());
+      }
+    }
     this.def = def;
   }
 
@@ -291,11 +299,17 @@ public class PluginBag<T> implements AutoCloseable {
       String name = info.name;
       if (meta.clazz.equals(SolrRequestHandler.class)) name = RequestHandlers.normalize(info.name);
       PluginHolder<T> old = put(name, o);
-      if (old != null) log.warn("Multiple entries of {} with name {}", meta.getCleanTag(), name);
+      if (old != null) {
+        if (log.isWarnEnabled()) {
+          log.warn("Multiple entries of {} with name {}", meta.getCleanTag(), name);
+        }
+      }
     }
     if (infos.size() > 0) { // Aggregate logging
-      log.debug("[{}] Initialized {} plugins of type {}: {}", solrCore.getName(), infos.size(), meta.getCleanTag(),
-          infos.stream().map(i -> i.name).collect(Collectors.toList()));
+      if (log.isDebugEnabled()) {
+        log.debug("[{}] Initialized {} plugins of type {}: {}", solrCore.getName(), infos.size(), meta.getCleanTag(),
+            infos.stream().map(i -> i.name).collect(Collectors.toList()));
+      }
     }
     for (Map.Entry<String, T> e : defaults.entrySet()) {
       if (!contains(e.getKey())) {
@@ -332,7 +346,7 @@ public class PluginBag<T> implements AutoCloseable {
       try {
         e.getValue().close();
       } catch (Exception exp) {
-        log.error("Error closing plugin " + e.getKey() + " of type : " + meta.getCleanTag(), exp);
+        log.error("Error closing plugin {} of type : {}", e.getKey(), meta.getCleanTag(), exp);
       }
     }
   }
@@ -341,7 +355,7 @@ public class PluginBag<T> implements AutoCloseable {
       try {
         if (inst != null && inst instanceof AutoCloseable) ((AutoCloseable) inst).close();
       } catch (Exception e) {
-        log.error("Error closing "+ inst , e);
+        log.error("Error closing {}", inst, e);
       }
     }
 
@@ -442,7 +456,9 @@ public class PluginBag<T> implements AutoCloseable {
 
   private synchronized boolean createInst() {
     if (lazyInst != null) return false;
-    log.info("Going to create a new {} with {} ", pluginMeta.getCleanTag(), pluginInfo.toString());
+    if (log.isInfoEnabled()) {
+      log.info("Going to create a new {} with {} ", pluginMeta.getCleanTag(), pluginInfo.toString());
+    }
     if (resourceLoader instanceof MemClassLoader) {
       MemClassLoader loader = (MemClassLoader) resourceLoader;
       loader.loadJars();
@@ -47,7 +47,7 @@ public class QuerySenderListener extends AbstractSolrEventListener {
   @Override
   public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {
     final SolrIndexSearcher searcher = newSearcher;
-    log.info("QuerySenderListener sending requests to " + newSearcher);
+    log.info("QuerySenderListener sending requests to {}", newSearcher);
     List<NamedList> allLists = (List<NamedList>)getArgs().get("queries");
     if (allLists == null) return;
     boolean createNewReqInfo = SolrRequestInfo.getRequestInfo() == null;
@@ -129,7 +129,9 @@ public final class RequestHandlers {
     }
     handlers.init(Collections.emptyMap(),core, modifiedInfos);
     handlers.alias(handlers.getDefault(), "");
-    log.debug("Registered paths: {}" , StrUtils.join(new ArrayList<>(handlers.keySet()) , ',' ));
+    if (log.isDebugEnabled()) {
+      log.debug("Registered paths: {}", StrUtils.join(new ArrayList<>(handlers.keySet()), ','));
+    }
     if (handlers.get("") == null && !handlers.alias("/select", "")) {
       if (handlers.get("") == null && !handlers.alias("standard", "")) {
         log.warn("no default request handler is registered (either '/select' or 'standard')");
@@ -143,7 +145,7 @@ public final class RequestHandlers {
     if(p!=null) {
       for (String arg : StrUtils.splitSmart(p, ',')) {
         if(config.getInitParams().containsKey(arg)) ags.add(config.getInitParams().get(arg));
-        else log.warn("INVALID paramSet {} in requestHandler {}", arg, info.toString());
+        else log.warn("INVALID paramSet {} in requestHandler {}", arg, info);
       }
     }
     for (InitParams args : config.getInitParams().values())
@@ -147,13 +147,17 @@ public class RequestParams implements MapSerializable {
       ZkSolrResourceLoader resourceLoader = (ZkSolrResourceLoader) loader;
       try {
         Stat stat = resourceLoader.getZkController().getZkClient().exists(resourceLoader.getConfigSetZkPath() + "/" + RequestParams.RESOURCE, null, true);
-        log.debug("latest version of {} in ZK is : {}", resourceLoader.getConfigSetZkPath() + "/" + RequestParams.RESOURCE, stat == null ? "" : stat.getVersion());
+        if (log.isDebugEnabled()) {
+          log.debug("latest version of {}/{} in ZK is : {}", resourceLoader.getConfigSetZkPath(), RequestParams.RESOURCE, stat == null ? "" : stat.getVersion());
+        }
         if (stat == null) {
           requestParams = new RequestParams(Collections.EMPTY_MAP, -1);
         } else if (requestParams == null || stat.getVersion() > requestParams.getZnodeVersion()) {
           Object[] o = getMapAndVersion(loader, RequestParams.RESOURCE);
           requestParams = new RequestParams((Map) o[0], (Integer) o[1]);
-          log.info("request params refreshed to version {}", requestParams.getZnodeVersion());
+          if (log.isInfoEnabled()) {
+            log.info("request params refreshed to version {}", requestParams.getZnodeVersion());
+          }
         }
       } catch (KeeperException | InterruptedException e) {
         SolrZkClient.checkInterrupted(e);
@@ -86,10 +86,10 @@ public class SchemaCodecFactory extends CodecFactory implements SolrCoreAware {
             "Invalid compressionMode: '" + compressionModeStr +
             "'. Value must be one of " + Arrays.toString(Mode.values()));
       }
-      log.debug("Using compressionMode: " + compressionMode);
+      log.debug("Using compressionMode: {}", compressionMode);
     } else {
       compressionMode = SOLR_DEFAULT_COMPRESSION_MODE;
-      log.debug("Using default compressionMode: " + compressionMode);
+      log.debug("Using default compressionMode: {}", compressionMode);
     }
     codec = new Lucene84Codec(compressionMode) {
       @Override
@@ -203,9 +203,11 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
 
     booleanQueryMaxClauseCount = getInt("query/maxBooleanClauses", IndexSearcher.getMaxClauseCount());
     if (IndexSearcher.getMaxClauseCount() < booleanQueryMaxClauseCount) {
-      log.warn("solrconfig.xml: <maxBooleanClauses> of {} is greater than global limit of {} "+
-          "and will have no effect", booleanQueryMaxClauseCount, IndexSearcher.getMaxClauseCount());
-      log.warn("set 'maxBooleanClauses' in solr.xml to increase global limit");
+      if (log.isWarnEnabled()) {
+        log.warn("solrconfig.xml: <maxBooleanClauses> of {} is greater than global limit of {} " +
+            "and will have no effect set 'maxBooleanClauses' in solr.xml to increase global limit"
+            , booleanQueryMaxClauseCount, IndexSearcher.getMaxClauseCount());
+      }
     }
 
     // Warn about deprecated / discontinued parameters
@@ -315,12 +317,13 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
     }
 
     if (version == Version.LATEST && !versionWarningAlreadyLogged.getAndSet(true)) {
-      log.warn(
-          "You should not use LATEST as luceneMatchVersion property: "+
-          "if you use this setting, and then Solr upgrades to a newer release of Lucene, "+
-          "sizable changes may happen. If precise back compatibility is important "+
-          "then you should instead explicitly specify an actual Lucene version."
-      );
+      if (log.isWarnEnabled()) {
+        log.warn(
+            "You should not use LATEST as luceneMatchVersion property: " +
+                "if you use this setting, and then Solr upgrades to a newer release of Lucene, " +
+                "sizable changes may happen. If precise back compatibility is important " +
+                "then you should instead explicitly specify an actual Lucene version.");
+      }
     }
 
     return version;
@@ -575,7 +578,7 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
       try {
         return valueOf(s.toUpperCase(Locale.ROOT));
       } catch (Exception e) {
-        log.warn("Unrecognized value for lastModFrom: " + s, e);
+        log.warn("Unrecognized value for lastModFrom: {}", s, e);
         return BOGUS;
       }
     }
@@ -608,9 +611,8 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
             ? Long.valueOf(ttlStr)
             : null;
       } catch (Exception e) {
-        log.warn("Ignoring exception while attempting to " +
-            "extract max-age from cacheControl config: " +
-            cacheControlHeader, e);
+        log.warn("Ignoring exception while attempting to extract max-age from cacheControl config: {}"
+            , cacheControlHeader, e);
       }
     }
     maxAge = tmp;
@@ -758,7 +760,9 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
       try {
         urls.addAll(SolrResourceLoader.getURLs(libPath));
       } catch (IOException e) {
-        log.warn("Couldn't add files from {} to classpath: {}", libPath, e.getMessage());
+        if (log.isWarnEnabled()) {
+          log.warn("Couldn't add files from {} to classpath: {}", libPath, e.getMessage());
+        }
       }
     }
 
@@ -784,14 +788,18 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
         else
           urls.addAll(SolrResourceLoader.getFilteredURLs(dir, regex));
       } catch (IOException e) {
-        log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e.getMessage());
+        if (log.isWarnEnabled()) {
+          log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e.getMessage());
+        }
       }
     } else if (null != path) {
       final Path dir = instancePath.resolve(path);
       try {
         urls.add(dir.toUri().toURL());
       } catch (MalformedURLException e) {
-        log.warn("Couldn't add file {} to classpath: {}", dir, e.getMessage());
+        if (log.isWarnEnabled()) {
+          log.warn("Couldn't add file {} to classpath: {}", dir, e.getMessage());
+        }
       }
     } else {
       throw new RuntimeException("lib: missing mandatory attributes: 'dir' or 'path'");
@@ -951,7 +959,9 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
 
   public RequestParams refreshRequestParams() {
     requestParams = RequestParams.getFreshRequestParams(getResourceLoader(), requestParams);
-    log.debug("current version of requestparams : {}", requestParams.getZnodeVersion());
+    if (log.isDebugEnabled()) {
+      log.debug("current version of requestparams : {}", requestParams.getZnodeVersion());
+    }
     return requestParams;
   }
 
@@ -953,7 +953,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     this.dataDir = initDataDir(dataDir, solrConfig, coreDescriptor);
     this.ulogDir = initUpdateLogDir(coreDescriptor);
 
-    log.info("[{}] Opening new SolrCore at [{}], dataDir=[{}]", logid, getInstancePath(), this.dataDir);
+    if (log.isInfoEnabled()) {
+      log.info("[{}] Opening new SolrCore at [{}], dataDir=[{}]", logid, getInstancePath(), this.dataDir);
+    }
 
     checkVersionFieldExistsInSchema(schema, coreDescriptor);
     setLatestSchema(schema);
@@ -1093,8 +1095,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
 
     if (coreContainer != null && coreContainer.isZooKeeperAware()) {
       if (reqHandlers.get("/get") == null) {
-        log.warn("WARNING: RealTimeGetHandler is not registered at /get. " +
-            "SolrCloud will always use full index replication instead of the more efficient PeerSync method.");
+        log.warn("WARNING: RealTimeGetHandler is not registered at /get. SolrCloud will always use full index replication instead of the more efficient PeerSync method.");
       }
 
       // ZK pre-register would have already happened so we read slice properties now
@@ -1370,7 +1371,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     close();
     while (!isClosed()) {
       final long milliSleep = 100;
-      log.info("Core {} is not yet closed, waiting {} ms before checking again.", getName(), milliSleep);
+      if (log.isInfoEnabled()) {
+        log.info("Core {} is not yet closed, waiting {} ms before checking again.", getName(), milliSleep);
+      }
       try {
         Thread.sleep(milliSleep);
       } catch (InterruptedException e) {
@@ -1426,9 +1429,13 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     if (pluginInfo != null && pluginInfo.className != null && pluginInfo.className.length() > 0) {
       cache = createInitInstance(pluginInfo, StatsCache.class, null,
           LocalStatsCache.class.getName());
-      log.debug("Using statsCache impl: {}", cache.getClass().getName());
+      if (log.isDebugEnabled()) {
+        log.debug("Using statsCache impl: {}", cache.getClass().getName());
+      }
     } else {
-      log.debug("Using default statsCache cache: {}", LocalStatsCache.class.getName());
+      if (log.isDebugEnabled()) {
+        log.debug("Using default statsCache cache: {}", LocalStatsCache.class.getName());
+      }
       cache = new LocalStatsCache();
     }
     return cache;
@@ -1728,7 +1735,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       sb.append(o == null ? "(null)" : o.toString());
     }
     // System.out.println(sb.toString());
-    log.info(sb.toString());
+    log.info("{}", sb);
   }
 
 
@@ -1915,7 +1922,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       throws IOException {
     IndexReader.CacheHelper cacheHelper = ctx.reader().getReaderCacheHelper();
     if (cacheHelper == null) {
-      log.debug("Cannot cache IndexFingerprint as reader does not support caching. searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      if (log.isDebugEnabled()) {
+        log.debug("Cannot cache IndexFingerprint as reader does not support caching. searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      }
       return IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
     }
 
@@ -1926,7 +1935,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     // documents were deleted from segment for which fingerprint was cached
     //
     if (f == null || (f.getMaxInHash() > maxVersion) || (f.getNumDocs() != ctx.reader().numDocs())) {
-      log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      if (log.isDebugEnabled()) {
+        log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      }
       f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
       // cache fingerprint for the segment only if all the versions in the segment are included in the fingerprint
       if (f.getMaxVersionEncountered() == f.getMaxInHash()) {
@@ -1935,9 +1946,13 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       }
 
     } else {
-      log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      if (log.isDebugEnabled()) {
+        log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
+      }
     }
-    log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size());
+    if (log.isDebugEnabled()) {
+      log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size());
+    }
     return f;
   }
 
@@ -2083,7 +2098,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       // but log a message about it to minimize confusion
 
       newestSearcher.incref();
-      log.debug("SolrIndexSearcher has not changed - not re-opening: {}", newestSearcher.get().getName());
+      if (log.isDebugEnabled()) {
+        log.debug("SolrIndexSearcher has not changed - not re-opening: {}", newestSearcher.get().getName());
+      }
       return newestSearcher;
 
     } // ELSE: open a new searcher against the old reader...
@@ -2219,7 +2236,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         try {
           searcherLock.wait();
         } catch (InterruptedException e) {
-          log.info(SolrException.toStr(e));
+          if (log.isInfoEnabled()) {
+            log.info(SolrException.toStr(e));
+          }
         }
       }
 
@@ -2248,7 +2267,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         try {
           searcherLock.wait();
         } catch (InterruptedException e) {
-          log.info(SolrException.toStr(e));
+          if (log.isInfoEnabled()) {
+            log.info(SolrException.toStr(e));
+          }
         }
         continue; // go back to the top of the loop and retry
       } else if (onDeckSearchers > 1) {
@@ -2499,7 +2520,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
     // NOTE: this should not happen now - see close() for details.
     // *BUT* if we left it enabled, this could still happen before
     // close() stopped the executor - so disable this test for now.
-    log.error("Ignoring searcher register on closed core:" + newSearcher);
+    log.error("Ignoring searcher register on closed core:{}", newSearcher);
     _searcher.decref();
   }
   ***/
@@ -3105,7 +3126,9 @@ public final class SolrCore implements SolrInfoBean, Closeable {
       return false;
     }
     if (stat.getVersion() > currentVersion) {
-      log.debug("{} is stale will need an update from {} to {}", zkPath, currentVersion, stat.getVersion());
+      if (log.isDebugEnabled()) {
+        log.debug("{} is stale will need an update from {} to {}", zkPath, currentVersion, stat.getVersion());
+      }
      return true;
    }
    return false;
@@ -541,8 +541,10 @@ class SolrCores {
   public TransientSolrCoreCache getTransientCacheHandler() {
 
     if (transientCoreCache == null) {
-      log.error("No transient handler has been defined. Check solr.xml to see if an attempt to provide a custom " +
-          "TransientSolrCoreCacheFactory was done incorrectly since the default should have been used otherwise.");
+      if (log.isErrorEnabled()) {
+        log.error("No transient handler has been defined. Check solr.xml to see if an attempt to provide a custom " +
+            "TransientSolrCoreCacheFactory was done incorrectly since the default should have been used otherwise.");
+      }
       return null;
     }
     return transientCoreCache.getTransientSolrCoreCache();
@@ -80,7 +80,9 @@ public class SolrDeletionPolicy extends IndexDeletionPolicy implements NamedList
     if (commits.isEmpty()) {
       return;
     }
-    log.debug("SolrDeletionPolicy.onInit: commits: {}", new CommitsLoggingDebug(commits));
+    if (log.isDebugEnabled()) {
+      log.debug("SolrDeletionPolicy.onInit: commits: {}", new CommitsLoggingDebug(commits));
+    }
     updateCommits(commits);
   }
 
@@ -89,7 +91,9 @@ public class SolrDeletionPolicy extends IndexDeletionPolicy implements NamedList
   */
  @Override
  public void onCommit(List<? extends IndexCommit> commits) throws IOException {
-    log.debug("SolrDeletionPolicy.onCommit: commits: {}", new CommitsLoggingDebug(commits));
+    if (log.isDebugEnabled()) {
+      log.debug("SolrDeletionPolicy.onCommit: commits: {}", new CommitsLoggingDebug(commits));
+    }
    updateCommits(commits);
  }
 
@@ -150,7 +154,9 @@ public class SolrDeletionPolicy extends IndexDeletionPolicy implements NamedList
     synchronized (this) {
       long maxCommitAgeTimeStamp = -1L;
       IndexCommit newest = commits.get(commits.size() - 1);
-      log.debug("newest commit generation = " + newest.getGeneration());
+      if (log.isDebugEnabled()) {
+        log.debug("newest commit generation = {}", newest.getGeneration());
+      }
       int singleSegKept = (newest.getSegmentCount() == 1) ? 1 : 0;
       int totalKept = 1;
 
@@ -74,7 +74,9 @@ public final class SolrPaths {
     } catch (NamingException e) {
       log.debug("No /solr/home in JNDI");
     } catch (RuntimeException ex) {
-      log.warn("Odd RuntimeException while testing for JNDI: " + ex.getMessage());
+      if (log.isWarnEnabled()) {
+        log.warn("Odd RuntimeException while testing for JNDI: {}", ex.getMessage());
+      }
     }
 
     // Now try system property
@@ -104,7 +106,8 @@ public final class SolrPaths {
         log.warn("Unable to create [{}] directory in SOLR_HOME [{}]. Features requiring this directory may fail.", USER_FILES_DIRECTORY, solrHome);
       }
     } catch (Exception e) {
-      log.warn("Unable to create [" + USER_FILES_DIRECTORY + "] directory in SOLR_HOME [" + solrHome + "]. Features requiring this directory may fail.", e);
+      log.warn("Unable to create [{}] directory in SOLR_HOME [{}]. Features requiring this directory may fail.",
+          USER_FILES_DIRECTORY, solrHome, e);
     }
   }
 }
@@ -180,12 +180,14 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
     this.classLoader = newLoader;
     this.needToReloadLuceneSPI = true;
 
-    log.info("Added {} libs to classloader, from paths: {}",
-        urls.size(), urls.stream()
-        .map(u -> u.getPath().substring(0,u.getPath().lastIndexOf("/")))
-        .sorted()
-        .distinct()
-        .collect(Collectors.toList()));
+    if (log.isInfoEnabled()) {
+      log.info("Added {} libs to classloader, from paths: {}",
+          urls.size(), urls.stream()
+              .map(u -> u.getPath().substring(0, u.getPath().lastIndexOf("/")))
+              .sorted()
+              .distinct()
+              .collect(Collectors.toList()));
+    }
   }
 
   /**
@ -220,7 +222,9 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
|
|||
allURLs.addAll(Arrays.asList(oldLoader.getURLs()));
|
||||
allURLs.addAll(urls);
|
||||
for (URL url : urls) {
|
||||
log.debug("Adding '{}' to classloader", url.toString());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding '{}' to classloader", url.toString());
|
||||
}
|
||||
}
|
||||
|
||||
ClassLoader oldParent = oldLoader.getParent();
|
||||
|
@ -447,7 +451,7 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
|
|||
return Class.forName(c, true, classLoader).asSubclass(expectedType);
|
||||
} catch (ClassNotFoundException | ClassCastException e) {
|
||||
// this can happen if the legacyAnalysisPattern below caches the wrong thing
|
||||
log.warn( name + " Unable to load cached class, attempting lookup. name={} shortname={} reason={}", c, cname, e);
|
||||
log.warn("{} Unable to load cached class, attempting lookup. name={} shortname={} reason={}", name , c, cname, e);
|
||||
classNameCache.remove(cname);
|
||||
}
|
||||
}
|
||||
|
@ -468,7 +472,9 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
|
|||
} else if (TokenFilterFactory.class.isAssignableFrom(expectedType)) {
|
||||
return clazz = TokenFilterFactory.lookupClass(name).asSubclass(expectedType);
|
||||
} else {
|
||||
log.warn("'{}' looks like an analysis factory, but caller requested different class type: {}", cname, expectedType.getName());
|
||||
if (log.isWarnEnabled()) {
|
||||
log.warn("'{}' looks like an analysis factory, but caller requested different class type: {}", cname, expectedType.getName());
|
||||
}
|
||||
}
|
||||
} catch (IllegalArgumentException ex) {
|
||||
// ok, we fall back to legacy loading
|
||||
|
@ -486,7 +492,7 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
|
|||
for (String subpackage : subpackages) {
|
||||
try {
|
||||
String name = base + '.' + subpackage + newName;
|
||||
log.trace("Trying class name " + name);
|
||||
log.trace("Trying class name {}", name);
|
||||
return clazz = Class.forName(name, true, classLoader).asSubclass(expectedType);
|
||||
} catch (ClassNotFoundException e1) {
|
||||
// ignore... assume first exception is best.
|
||||
|
@ -555,8 +561,9 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
|
|||
}
|
||||
|
||||
} catch (Error err) {
|
||||
log.error("Loading Class " + cName + " (" + clazz.getName() + ") triggered serious java error: "
|
||||
+ err.getClass().getName(), err);
|
||||
log.error("Loading Class {} ({}) triggered serious java error: {}", cName, clazz.getName(),
|
||||
err.getClass().getName(), err);
|
||||
|
||||
throw err;
|
||||
|
||||
} catch (Exception e) {
|
||||
|
@ -649,7 +656,9 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
|
|||
try {
|
||||
infoRegistry.put(bean.getName(), bean);
|
||||
} catch (Exception e) {
|
||||
log.warn("could not register MBean '" + bean.getName() + "'.", e);
|
||||
if (log.isWarnEnabled()) {
|
||||
log.warn("could not register MBean '{}'.", bean.getName(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -754,7 +763,7 @@ public class SolrResourceLoader implements ResourceLoader, Closeable {
|
|||
try (OutputStream out = new FileOutputStream(confFile);) {
|
||||
out.write(content);
|
||||
}
|
||||
log.info("Written confile " + resourceName);
|
||||
log.info("Written confile {}", resourceName);
|
||||
} catch (IOException e) {
|
||||
final String msg = "Error persisting conf file " + resourceName;
|
||||
log.error(msg, e);
|
||||
|
|
|
@@ -206,7 +206,9 @@ public class SolrXmlConfig {
       return properties;
     }
     catch (XPathExpressionException e) {
-      log.warn("Error parsing solr.xml: " + e.getMessage());
+      if (log.isWarnEnabled()) {
+        log.warn("Error parsing solr.xml: {}", e.getMessage());
+      }
       return null;
     }
   }
@@ -530,7 +532,7 @@ public class SolrXmlConfig {
     // if there's an MBean server running but there was no JMX reporter then add a default one
     MBeanServer mBeanServer = JmxUtil.findFirstMBeanServer();
     if (mBeanServer != null && !hasJmxReporter) {
-      log.info("MBean server found: " + mBeanServer + ", but no JMX reporters were configured - adding default JMX reporter.");
+      log.info("MBean server found: {}, but no JMX reporters were configured - adding default JMX reporter.", mBeanServer);
       Map<String,Object> attributes = new HashMap<>();
       attributes.put("name", "default");
       attributes.put("class", SolrJmxReporter.class.getName());

@@ -60,7 +60,7 @@ public class StandardDirectoryFactory extends CachingDirectoryFactory {
   protected LockFactory createLockFactory(String rawLockType) throws IOException {
     if (null == rawLockType) {
       rawLockType = DirectoryFactory.LOCK_TYPE_NATIVE;
-      log.warn("No lockType configured, assuming '"+rawLockType+"'.");
+      log.warn("No lockType configured, assuming '{}'.", rawLockType);
     }
     final String lockType = rawLockType.toLowerCase(Locale.ROOT).trim();
     switch (lockType) {

@@ -90,7 +90,9 @@ public class TransientSolrCoreCacheDefault extends TransientSolrCoreCache {
       protected boolean removeEldestEntry(Map.Entry<String, SolrCore> eldest) {
         if (size() > cacheSize) {
           SolrCore coreToClose = eldest.getValue();
-          log.info("Closing transient core [{}]", coreToClose.getName());
+          if (log.isInfoEnabled()) {
+            log.info("Closing transient core [{}]", coreToClose.getName());
+          }
           coreContainer.queueCoreToClose(coreToClose);
           return true;
         }

@@ -136,7 +136,7 @@ public class XmlConfigFile { // formerly simply "Config"
       dbf.setXIncludeAware(true);
       dbf.setNamespaceAware(true);
     } catch(UnsupportedOperationException e) {
-      log.warn(name + " XML parser doesn't support XInclude option");
+      log.warn("{} XML parser doesn't support XInclude option", name);
     }
   }

@@ -252,7 +252,7 @@ public class XmlConfigFile { // formerly simply "Config"
       if (errIfMissing) {
         throw new RuntimeException(name + " missing "+path);
       } else {
-        log.trace(name + " missing optional " + path);
+        log.trace("{} missing optional {}", name, path);
         return null;
       }
     }
@@ -261,7 +261,7 @@ public class XmlConfigFile { // formerly simply "Config"
             name + " contains more than one value for config path: " + path);
       }
       Node nd = nodes.item(0);
-      log.trace(name + ":" + path + "=" + nd);
+      log.trace("{}:{}={}", name, path, nd);
       return nd;

     } catch (XPathExpressionException e) {
@@ -286,12 +286,12 @@ public class XmlConfigFile { // formerly simply "Config"
       if (errIfMissing) {
         throw new RuntimeException(name + " missing "+path);
       } else {
-        log.trace(name + " missing optional " + path);
+        log.trace("{} missing optional {}", name, path);
         return null;
       }
     }

-      log.trace(name + ":" + path + "=" + nodeList);
+      log.trace("{}:{}={}", name, path, nodeList);
       return nodeList;

     } catch (XPathExpressionException e) {
@@ -373,7 +373,7 @@ public class XmlConfigFile { // formerly simply "Config"

     String txt = DOMUtil.getText(nd);

-    log.debug(name + ' '+path+'='+txt);
+    log.debug("{} {}={}", name, path, txt);
     return txt;
   }

@@ -99,9 +99,9 @@ public class ZkContainer {
       // If this is an ensemble, allow for a long connect time for other servers to come up
       if (zkRun != null && zkServer.getServers().size() > 1) {
         zkClientConnectTimeout = 24 * 60 * 60 * 1000;  // 1 day for embedded ensemble
-        log.info("Zookeeper client=" + zookeeperHost + " Waiting for a quorum.");
+        log.info("Zookeeper client={} Waiting for a quorum.", zookeeperHost);
       } else {
-        log.info("Zookeeper client=" + zookeeperHost);
+        log.info("Zookeeper client={}", zookeeperHost);
       }
       String confDir = System.getProperty("bootstrap_confdir");
       boolean boostrapConf = Boolean.getBoolean("bootstrap_conf");
@@ -178,7 +178,9 @@ public class ZkContainer {
       try {
         if (testing_beforeRegisterInZk != null) {
           boolean didTrigger = testing_beforeRegisterInZk.test(cd);
-          log.debug((didTrigger ? "Ran" : "Skipped") + " pre-zk hook");
+          if (log.isDebugEnabled()) {
+            log.debug((didTrigger ? "Ran" : "Skipped") + " pre-zk hook");
+          }
         }
         if (!core.getCoreContainer().isShutDown()) {
           zkController.register(core.getName(), cd, skipRecovery);

@@ -227,7 +227,9 @@ public class SolrSnapshotManager {
       public void onInit(List<? extends IndexCommit> commits) throws IOException {
         for (IndexCommit ic : commits) {
           if (gen == ic.getGeneration()) {
-            log.info("Deleting non-snapshotted index commit with generation {}", ic.getGeneration());
+            if (log.isInfoEnabled()) {
+              log.info("Deleting non-snapshotted index commit with generation {}", ic.getGeneration());
+            }
             ic.delete();
           }
         }
@@ -258,7 +260,9 @@ public class SolrSnapshotManager {
       public void onInit(List<? extends IndexCommit> commits) throws IOException {
         for (IndexCommit ic : commits) {
           if (!genNumbers.contains(ic.getGeneration())) {
-            log.info("Deleting non-snapshotted index commit with generation {}", ic.getGeneration());
+            if (log.isInfoEnabled()) {
+              log.info("Deleting non-snapshotted index commit with generation {}", ic.getGeneration());
+            }
             ic.delete();
           }
         }

@@ -171,8 +171,10 @@ public class SolrSnapshotMetaDataManager {
   public synchronized void snapshot(String name, String indexDirPath, long gen) throws IOException {
     Objects.requireNonNull(name);

-    log.info("Creating the snapshot named {} for core {} associated with index commit with generation {} in directory {}"
-        , name, solrCore.getName(), gen, indexDirPath);
+    if (log.isInfoEnabled()) {
+      log.info("Creating the snapshot named {} for core {} associated with index commit with generation {} in directory {}"
+          , name, solrCore.getName(), gen, indexDirPath);
+    }

     if(nameToDetailsMapping.containsKey(name)) {
       throw new SolrException(ErrorCode.BAD_REQUEST, "A snapshot with name " + name + " already exists");
@@ -204,7 +206,9 @@ public class SolrSnapshotMetaDataManager {
    * @throws IOException in case of I/O error
    */
   public synchronized Optional<SnapshotMetaData> release(String name) throws IOException {
-    log.info("Deleting the snapshot named {} for core {}", name, solrCore.getName());
+    if (log.isInfoEnabled()) {
+      log.info("Deleting the snapshot named {} for core {}", name, solrCore.getName());
+    }
     SnapshotMetaData result = nameToDetailsMapping.remove(Objects.requireNonNull(name));
     if(result != null) {
       boolean success = false;
@@ -307,7 +311,9 @@ public class SolrSnapshotMetaDataManager {
         solrCore.getDirectoryFactory().release(d);
       }
     } else {
-      log.warn("Commit with name {} is not persisted for core {}", commitName, solrCore.getName());
+      if (log.isWarnEnabled()) {
+        log.warn("Commit with name {} is not persisted for core {}", commitName, solrCore.getName());
+      }
     }

     return result;

@@ -111,7 +111,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       CLIO.out("Successfully created snapshot with name " + snapshotName + " for collection " + collectionName);

     } catch (Exception e) {
-      log.error("Failed to create a snapshot with name " + snapshotName + " for collection " + collectionName, e);
+      log.error("Failed to create a snapshot with name {} for collection {}", snapshotName, collectionName, e);
       CLIO.out("Failed to create a snapshot with name " + snapshotName + " for collection " + collectionName
           +" due to following error : "+e.getLocalizedMessage());
     }
@@ -126,7 +126,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       CLIO.out("Successfully deleted snapshot with name " + snapshotName + " for collection " + collectionName);

     } catch (Exception e) {
-      log.error("Failed to delete a snapshot with name " + snapshotName + " for collection " + collectionName, e);
+      log.error("Failed to delete a snapshot with name {} for collection {}", snapshotName, collectionName, e);
       CLIO.out("Failed to delete a snapshot with name " + snapshotName + " for collection " + collectionName
           +" due to following error : "+e.getLocalizedMessage());
     }
@@ -146,7 +146,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       }

     } catch (Exception e) {
-      log.error("Failed to list snapshots for collection " + collectionName, e);
+      log.error("Failed to list snapshots for collection {}", collectionName, e);
       CLIO.out("Failed to list snapshots for collection " + collectionName
           +" due to following error : "+e.getLocalizedMessage());
     }
@@ -275,8 +275,9 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       buildCopyListings(collectionName, snapshotName, localFsPath, pathPrefix);
       CLIO.out("Successfully prepared copylisting for the snapshot export.");
     } catch (Exception e) {
-      log.error("Failed to prepare a copylisting for snapshot with name " + snapshotName + " for collection "
-          + collectionName, e);
+      log.error("Failed to prepare a copylisting for snapshot with name {} for collection {}", snapshotName, collectionName, e);

       CLIO.out("Failed to prepare a copylisting for snapshot with name " + snapshotName + " for collection "
           + collectionName + " due to following error : " + e.getLocalizedMessage());
       System.exit(1);
@@ -286,7 +287,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       backupCollectionMetaData(collectionName, snapshotName, destPath);
       CLIO.out("Successfully backed up collection meta-data");
     } catch (Exception e) {
-      log.error("Failed to backup collection meta-data for collection " + collectionName, e);
+      log.error("Failed to backup collection meta-data for collection {}", collectionName, e);
       CLIO.out("Failed to backup collection meta-data for collection " + collectionName
           + " due to following error : " + e.getLocalizedMessage());
       System.exit(1);
@@ -306,7 +307,7 @@ public class SolrSnapshotsTool implements Closeable, CLIO {
       // if asyncId is null, processAsync will block and throw an Exception with any error
       backup.processAsync(asyncReqId.orElse(null), solrClient);
     } catch (Exception e) {
-      log.error("Failed to backup collection meta-data for collection " + collectionName, e);
+      log.error("Failed to backup collection meta-data for collection {}", collectionName, e);
       CLIO.out("Failed to backup collection meta-data for collection " + collectionName
           + " due to following error : " + e.getLocalizedMessage());
       System.exit(1);

@@ -125,7 +125,9 @@ public class DistribPackageStore implements PackageStore {
       this.metaData = meta;
       this.fileData = data;
       _persistToFile(solrhome, path, data, meta);
-      log.info("persisted a file {} and metadata. sizes {} {}", path, data.limit(), meta.limit());
+      if (log.isInfoEnabled()) {
+        log.info("persisted a file {} and metadata. sizes {} {}", path, data.limit(), meta.limit());
+      }

     }
   }
@@ -164,7 +166,7 @@ public class DistribPackageStore implements PackageStore {
       try {
         IOUtils.deleteFilesIfExist(getRealpath(path), getRealpath(getMetaPath()));
       } catch (IOException e) {
-        log.error("Unable to delete files: " + path);
+        log.error("Unable to delete files: {}", path);
       }

     }
@@ -366,8 +368,7 @@ public class DistribPackageStore implements PackageStore {
             //fire and forget
             Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), url, null);
           } catch (Exception e) {
-            log.info("Node: " + node +
-                " failed to respond for file fetch notification", e);
+            log.info("Node: {} failed to respond for file fetch notification", node, e);
             //ignore the exception
             // some nodes may be down or not responding
           }
@@ -487,7 +488,7 @@ public class DistribPackageStore implements PackageStore {
         }
       }
     } catch (Exception e) {
-      log.error("Could not refresh files in " +path, e);
+      log.error("Could not refresh files in {}", path, e);
     }
   }

@@ -521,7 +522,7 @@ public class DistribPackageStore implements PackageStore {
         log.warn("Unable to create [{}] directory in SOLR_HOME [{}]. Features requiring this directory may fail.", packageStoreDir, solrHome);
       }
     } catch (Exception e) {
-      log.warn("Unable to create [" + packageStoreDir + "] directory in SOLR_HOME [" + solrHome + "]. Features requiring this directory may fail.", e);
+      log.warn("Unable to create [{}] directory in SOLR_HOME [{}]. Features requiring this directory may fail.", packageStoreDir, solrHome, e);
     }
   }
 }

@@ -254,7 +254,7 @@ public class PackageStoreAPI {
         try {
           packageStore.fetch(pathCopy, getFrom);
         } catch (Exception e) {
-          log.error("Failed to download file: " + pathCopy, e);
+          log.error("Failed to download file: {}", pathCopy, e);
         }
         log.info("downloaded file: {}", pathCopy);
       });

@@ -114,7 +114,9 @@ public abstract class ManagedResource {
       NamedList<?> clonedArgs = args.clone();
       observer.onManagedResourceInitialized(clonedArgs,this);
     }
-    log.info("Notified {} observers of {}", observers.size(), getResourceId());
+    if (log.isInfoEnabled()) {
+      log.info("Notified {} observers of {}", observers.size(), getResourceId());
+    }
   }

   /**
@@ -265,7 +267,7 @@ public abstract class ManagedResource {
       // note: the data we're managing now remains in a dubious state
       // however the text analysis component remains unaffected
       // (at least until core reload)
-      log.error("Failed to load data from storage due to: "+reloadExc);
+      log.error("Failed to load data from storage due to: {}", reloadExc);
     }
   }

@@ -360,8 +362,10 @@ public abstract class ManagedResource {
    */
   @SuppressWarnings("unchecked")
   public synchronized void doPut(BaseSolrResource endpoint, Representation entity, Object json) {

-    log.info("Processing update to {}: {} is a "+json.getClass().getName(), getResourceId(), json);
-
+    if (log.isInfoEnabled()) {
+      log.info("Processing update to {}: {} is a {}", getResourceId(), json, json.getClass().getName());
+    }

     boolean updatedInitArgs = false;
     Object managedData = null;

@@ -110,7 +110,7 @@ public abstract class ManagedResourceStorage {
     } else {
       if (zkClient != null) {
         String znodeBase = "/configs/"+zkConfigName;
-        log.debug("Setting up ZooKeeper-based storage for the RestManager with znodeBase: "+znodeBase);
+        log.debug("Setting up ZooKeeper-based storage for the RestManager with znodeBase: {}", znodeBase);
         storageIO = new ManagedResourceStorage.ZooKeeperStorageIO(zkClient, znodeBase);
       } else {
         storageIO = new FileStorageIO();
@@ -133,8 +133,9 @@ public abstract class ManagedResourceStorage {
         // that doesn't have write-access to the config dir
         // while this failover approach is not ideal, it's better
         // than causing the core to fail esp. if managed resources aren't being used
-        log.warn("Cannot write to config directory "+configDir.getAbsolutePath()+
-            "; switching to use InMemory storage instead.");
+        if (log.isWarnEnabled()) {
+          log.warn("Cannot write to config directory {} ; switching to use InMemory storage instead.", configDir.getAbsolutePath());
+        }
         storageIO = new ManagedResourceStorage.InMemoryStorageIO();
       }
     }
@@ -165,7 +166,7 @@ public abstract class ManagedResourceStorage {
         dir.mkdirs();

       storageDir = dir.getAbsolutePath();
-      log.info("File-based storage initialized to use dir: "+storageDir);
+      log.info("File-based storage initialized to use dir: {}", storageDir);
     }

     @Override
@@ -238,7 +239,7 @@ public abstract class ManagedResourceStorage {
         throw new SolrException(ErrorCode.SERVER_ERROR, errMsg, exc);
       }

-      log.info("Configured ZooKeeperStorageIO with znodeBase: "+znodeBase);
+      log.info("Configured ZooKeeperStorageIO with znodeBase: {}", znodeBase);
     }

     @Override
@@ -446,9 +447,11 @@ public abstract class ManagedResourceStorage {
             writer.close();
           } catch (Exception ignore){}
         }
       }
-      log.info("Saved JSON object to path {} using {}",
-          storedResourceId, storageIO.getInfo());
-    }
+      if (log.isInfoEnabled()) {
+        log.info("Saved JSON object to path {} using {}",
+            storedResourceId, storageIO.getInfo());
+      }
+    }
   } // end JsonStorage

@@ -489,8 +492,10 @@ public abstract class ManagedResourceStorage {
    */
   public Object load(String resourceId) throws IOException {
     String storedResourceId = getStoredResourceId(resourceId);

-    log.debug("Reading {} using {}", storedResourceId, storageIO.getInfo());
-
+    if (log.isDebugEnabled()) {
+      log.debug("Reading {} using {}", storedResourceId, storageIO.getInfo());
+    }

     InputStream inputStream = storageIO.openInputStream(storedResourceId);
     if (inputStream == null) {
@@ -501,9 +506,11 @@ public abstract class ManagedResourceStorage {
         parsed = parseText(reader, resourceId);
       }

-      String objectType = (parsed != null) ? parsed.getClass().getSimpleName() : "null";
-      log.info(String.format(Locale.ROOT, "Loaded %s at path %s using %s",
-          objectType, storedResourceId, storageIO.getInfo()));
+      String objectType = (parsed != null) ? parsed.getClass().getSimpleName() : "null";
+      if (log.isInfoEnabled()) {
+        log.info(String.format(Locale.ROOT, "Loaded %s at path %s using %s",
+            objectType, storedResourceId, storageIO.getInfo()));
+      }

       return parsed;
     }

@@ -219,14 +219,17 @@ public class RestManager {

       if (observer != null) {
         reg.observers.add(observer);
-        log.info("Added observer of type {} to existing ManagedResource {}",
-            observer.getClass().getName(), resourceId);
+        if (log.isInfoEnabled()) {
+          log.info("Added observer of type {} to existing ManagedResource {}",
+              observer.getClass().getName(), resourceId);
+        }
       }
     } else {
       registered.put(resourceId,
           new ManagedResourceRegistration(resourceId, implClass, observer));
-      log.info("Registered ManagedResource impl {} for path {}",
-          implClass.getName(), resourceId);
+      if (log.isInfoEnabled()) {
+        log.info("Registered ManagedResource impl {} for path {}", implClass.getName(), resourceId);
+      }
     }

     // there may be a RestManager, in which case, we want to add this new ManagedResource immediately
@@ -334,7 +337,7 @@ public class RestManager {
       }
     }

-    log.info("Found ManagedResource ["+managedResource+"] for "+resourceId);
+    log.info("Found ManagedResource [{}] for {}", managedResource, resourceId);
   }

   @Override
@@ -614,7 +617,7 @@ public class RestManager {
                    StorageIO storageIO)
       throws SolrException
   {
-    log.debug("Initializing RestManager with initArgs: "+initArgs);
+    log.debug("Initializing RestManager with initArgs: {}", initArgs);

     if (storageIO == null)
       throw new IllegalArgumentException(
@@ -633,7 +636,9 @@ public class RestManager {
     managed.put(SCHEMA_BASE_PATH+MANAGED_ENDPOINT, endpoint);

     // init registered managed resources
-    log.debug("Initializing {} registered ManagedResources", registry.registered.size());
+    if (log.isDebugEnabled()) {
+      log.debug("Initializing {} registered ManagedResources", registry.registered.size());
+    }
     for (ManagedResourceRegistration reg : registry.registered.values()) {
       // keep track of this for lookups during request processing
       managed.put(reg.resourceId, createManagedResource(reg));
@@ -751,7 +756,7 @@ public class RestManager {
         res.onResourceDeleted();
       } catch (IOException e) {
         // the resource is already deleted so just log this
-        log.error("Error when trying to clean-up after deleting "+resourceId, e);
+        log.error("Error when trying to clean-up after deleting {}", resourceId, e);
       }
     }

@@ -165,7 +165,9 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
           cpsm.mappings.put(key, sortedVals);
         }
       }
-      log.info("Loaded {} synonym mappings for {}", synonymMappings.size(), getResourceId());
+      if (log.isInfoEnabled()) {
+        log.info("Loaded {} synonym mappings for {}", synonymMappings.size(), getResourceId());
+      }
     }

     @SuppressWarnings("unchecked")

@@ -160,7 +160,9 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
           cpsm.mappings.put(key, sortedVals);
         }
       }
-      log.info("Loaded {} synonym mappings for {}", synonymMappings.size(), getResourceId());
+      if (log.isInfoEnabled()) {
+        log.info("Loaded {} synonym mappings for {}", synonymMappings.size(), getResourceId());
+      }
     }

     @SuppressWarnings("unchecked")

@@ -112,8 +112,9 @@ public class ManagedWordSetResource extends ManagedResource
     } else {
       storeManagedData(new ArrayList<String>(0)); // stores an empty word set
     }

-    log.info("Loaded "+managedWords.size()+" words for "+getResourceId());
+    if (log.isInfoEnabled()) {
+      log.info("Loaded {} words for {}", managedWords.size(), getResourceId());
+    }
   }

   /**
@@ -162,7 +163,7 @@ public class ManagedWordSetResource extends ManagedResource
     boolean madeChanges = false;
    List<String> words = (List<String>)updates;

-    log.info("Applying updates: "+words);
+    log.info("Applying updates: {}", words);
     boolean ignoreCase = getIgnoreCase();
     for (String word : words) {
       if (ignoreCase)
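
The edits above are mechanical, so a summary may help reviewers. What follows is a minimal sketch, not code from this commit: the class and the expensiveStateDump helper are hypothetical, and it only illustrates the three patterns the new validate-log-calls check pushes toward with standard SLF4J calls.

import java.lang.invoke.MethodHandles;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogCallPatterns {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  public void examples(String coreName, Exception e) {
    // Discouraged: the message string is concatenated even when INFO is disabled.
    //   log.info("Closing core " + coreName);

    // Preferred: a parameterized message; formatting is deferred until
    // SLF4J knows the INFO level is actually enabled.
    log.info("Closing core {}", coreName);

    // An exception passed as the final argument needs no placeholder;
    // SLF4J appends its stack trace after the formatted message.
    log.error("Failed to close core {}", coreName, e);

    // When computing an argument itself is expensive (a method call, a
    // stream pipeline, a big toString()), guard the whole call so the
    // work is skipped entirely at higher log levels.
    if (log.isDebugEnabled()) {
      log.debug("Core state: {}", expensiveStateDump(coreName));
    }
  }

  private String expensiveStateDump(String coreName) {
    return "state of " + coreName; // stands in for genuinely costly work
  }
}

Note that a plain {} placeholder already avoids building the message string, so the isDebugEnabled()-style guard is only needed when constructing an argument does real work of its own, which is why only some of the hunks above add one.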