diff --git a/gradle/validation/precommit.gradle b/gradle/validation/precommit.gradle index 79347646454..aad8c6f071a 100644 --- a/gradle/validation/precommit.gradle +++ b/gradle/validation/precommit.gradle @@ -40,7 +40,8 @@ configure(rootProject) { "javadoc", "rat", "ecjLint", - "checkMissingDocs" + "checkMissingDocs", + "validateLogCalls" ]} } } diff --git a/gradle/validation/validate-log-calls.gradle b/gradle/validation/validate-log-calls.gradle index 90f91dc3f2f..b792e14c1c2 100644 --- a/gradle/validation/validate-log-calls.gradle +++ b/gradle/validation/validate-log-calls.gradle @@ -17,147 +17,33 @@ import org.apache.tools.ant.BuildException -import static java.util.concurrent.TimeUnit.SECONDS - // Checks logging calls to keep from using patterns that might be expensive // either in CPU or unnecessary object creation +// either in CPU or unnecessary object creation. Some of the checks are a little +// over-zealous, but it's worth a bit of aggravation to catch the egregious +// mistakes before they accrue. -configure(rootProject) { -} +// This all started with Java Flight Recorder showing a massive number of +// objects allocated and garbage collected. Tracing a bunch of them down showed +// that we have a number of calls like: +// log.debug("some string" + very_expensive_object_to_make_a_string). +// which _always_ constructed the very expensive object even if the log level +// was INFO or more severe. Rather than leave lots of confusing formats lying +// about or expecting people to remember the rules, we opted to make this +// check part of Gradle precommit. It will be included by default in Solr +// 9.0. + +// See the associated help task "gradlew helpValidateLogCalls" allprojects { plugins.withType(JavaPlugin) { task validateLogCalls(type: ValidateLogCallsTask) { description "Checks that log calls are either validated or conform to efficient patterns." group "verification" - - doFirst { - if (project.hasProperty('srcDir')) { - srcDir.addAll(project.getProperty('srcDir').split(',')) - } else { // Remove this later, make it optional - //TODO - throw new BuildException(String.format(Locale.ENGLISH, - '''Until we get all the calls cleaned up, you MUST specify -PsrcDir=relative_path, e.g. - "-PsrcDir=solr/core/src/java/org/apache/solr/core". This task will recursively check all - "*.java" files under that directory''')) - } - - checkPlus = Boolean.valueOf(propertyOrDefault('checkPlus', 'false')) - } } } -// // Attach ecjLint to check. -// check.dependsOn ecjLint - // What does the below mean? -// // Each validation task should be attached to check but make sure -// // precommit() as a whole is a dependency on rootProject.check -// check.dependsOn precommit - } -//} class ValidateLogCallsTask extends DefaultTask { - @Input - List srcDir = [] - - @Input - boolean checkPlus - - // TODO, remove when you go to project-based checking.
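// For illustration only -- a minimal slf4j sketch, not taken from the patched sources
// (log, stateObject, and buildExpensiveDescription() are made-up names): the call shapes
// this task flags, and the replacements applied throughout the rest of this diff.
//   log.debug("state: " + buildExpensiveDescription());    // flagged: the argument is built even when DEBUG is off
//   log.debug("state: {}", stateObject);                    // OK: stateObject.toString() is deferred until the level is enabled
//   if (log.isDebugEnabled()) {
//     log.debug("state: {}", buildExpensiveDescription());  // OK: the method call is guarded by the level check
//   }
//   log.info("const1 {}" + " const2 {}", obj1, obj2);       // OK: constant strings are folded by the compiler
//   log.debug("state: " + stateObject.toString()); //logok  // suppressed: explicitly waved through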
- Set dirsToCheck = [ - "solr/core/src/java/org/apache/solr/analysis" - , "solr/core/src/java/org/apache/solr/api" - , "solr/core/src/java/org/apache/solr/client" - , "solr/core/src/java/org/apache/solr/cloud" // 120 - , "solr/core/src/java/org/apache/solr/cloud/api" - , "solr/core/src/java/org/apache/solr/cloud/autoscaling" - , "solr/core/src/java/org/apache/solr/cloud/cdcr" - , "solr/core/src/java/org/apache/solr/cloud/hdfs" - , "solr/core/src/java/org/apache/solr/cloud/overseer" - , "solr/core/src/java/org/apache/solr/cloud/rule" - , "solr/core/src/java/org/apache/solr/core" - , "solr/core/src/java/org/apache/solr/filestore" - , "solr/core/src/java/org/apache/solr/handler/admin" - , "solr/core/src/java/org/apache/solr/handler/component" - , "solr/core/src/java/org/apache/solr/handler/export" - , "solr/core/src/java/org/apache/solr/handler/loader" - , "solr/core/src/java/org/apache/solr/handler/tagger" - , "solr/core/src/java/org/apache/solr/highlight" - , "solr/core/src/java/org/apache/solr/index" - , "solr/core/src/java/org/apache/solr/internal" - , "solr/core/src/java/org/apache/solr/legacy" - , "solr/core/src/java/org/apache/solr/logging" - , "solr/core/src/java/org/apache/solr/metrics" - , "solr/core/src/java/org/apache/solr/packagemanager" - , "solr/core/src/java/org/apache/solr/parser" - , "solr/core/src/java/org/apache/solr/pkg" - , "solr/core/src/java/org/apache/solr/query" - , "solr/core/src/java/org/apache/solr/request" - , "solr/core/src/java/org/apache/solr/response" - , "solr/core/src/java/org/apache/solr/rest" - , "solr/core/src/java/org/apache/solr/schema" - , "solr/core/src/java/org/apache/solr/search" - , "solr/core/src/java/org/apache/solr/security" - , "solr/core/src/java/org/apache/solr/servlet" - , "solr/core/src/java/org/apache/solr/spelling" - , "solr/core/src/java/org/apache/solr/store" - , "solr/core/src/java/org/apache/solr/uninverting" - , "solr/core/src/java/org/apache/solr/update" - , "solr/core/src/java/org/apache/solr/util" - , "solr/solrj" - , "solr/core/src/java/org/apache/solr/handler" - , "solr/core/src/test/org/apache/solr/cloud/api" - , "solr/core/src/test/org/apache/solr/cloud/autoscaling" -// , "solr/core/src/test/org/apache/solr/cloud" -// , "solr/core/src/test/org/apache/solr/cloud/cdcr" -// , "solr/core/src/test/org/apache/solr/handler" -// , "solr/core/src/test/org/apache/solr/metrics" -// , "solr/core/src/test/org/apache/solr/request" -// , "solr/core/src/test/org/apache/solr/response" -// , "solr/core/src/test/org/apache/solr/schema" -// , "solr/core/src/test/org/apache/solr/search" -// , "solr/core/src/test/org/apache/solr/security" -// , "solr/core/src/test/org/apache/solr/spelling" -// , "solr/core/src/test/org/apache/solr" -// , "solr/core/src/test/org/apache/solr/update" -// , "solr/core/src/test/org/apache/solr/util" -// , "solr/core/src/test" -// , "solr/core" - - ] - - //TODO REMOVE ME! Really! and the check for bare parens. Several times I've put in () when I meant {} and only - // caught it by chance. So for this mass edit, I created a check with a a lot of false positives. This is a list of - // them and we won't report them. 
- - Map> parenHack = [ - "AddReplicaCmd.java" : [99] - , "Assign.java" : [329] - , "CloudSolrClientTest.java" : [1083] - , "CommitTracker.java" : [135] - , "DeleteReplicaCmd.java" : [75] - , "DirectUpdateHandler2.java" : [838, 859] - , "ManagedIndexSchemaFactory.java": [284] - , "MoveReplicaCmd.java" : [75] - , "PeerSync.java" : [704] - , "RecordingJSONParser.java" : [76] - , "SliceMutator.java" : [61] - , "SolrDispatchFilter.java" : [150, 205, 242] - , "Suggester.java" : [147, 181] - , "TestSimTriggerIntegration.java" : [710, 713] - , "TestSolrJErrorHandling.java" : [289] - , "TriggerIntegrationTest.java" : [427, 430] - , "UpdateLog.java" : [1976] - , "V2HttpCall.java" : [158] - // checking against 8x in master, take these out usually. -// , "CoreContainer.java" : [1096] -// , "ConcurrentLFUCache.java" : [700, 911] -// , "ConcurrentLRUCache.java" : [911] -// , "DirectUpdateHandler2.java" : [844, 865] -// , "PeerSync.java" : [697] -// , "SolrIndexWriter.java" : [356] -// , "UpdateLog.java" : [1973] - ] def logLevels = ["log.trace", "log.debug", "log.info", "log.warn", "log.error", "log.fatal"] def errsFound = 0; @@ -170,32 +56,16 @@ class ValidateLogCallsTask extends DefaultTask { // We have a log.something line, check for patterns we're not fond of. def checkLine(File file, String line, int lineNumber, String previous) { - boolean violation = false - def bareParens = (line =~ /".*?"/).findAll() - bareParens.each({ part -> - if (part.contains("()")) { - List hack = parenHack.get(file.name) - if (hack == null || hack.contains(lineNumber) == false) { - violation = true - } - } - }) - - // If the line has been explicitly checked, skip it. + // If the line has been explicitly OK'd, then it's OK! if (line.replaceAll("\\s", "").toLowerCase().contains("//logok")) { return } // Strip all of the comments, things in quotes and the like. - def level = "" - def lev = (line =~ "log\\.(.*?)\\(") - if (lev.find()) { - level = lev.group(1).toLowerCase().trim() - } def stripped = - line.replaceFirst("//.*", " ") // remove comment to EOL. Again, fragile due to the possibility of embedded double slashes - .replaceFirst(/.*?\(/, " ") // Get rid of "log.info(" + line.replaceFirst("//.*", " ") // remove comment to EOL. May be fragile due to the possibility of embedded double slashes + .replaceFirst(/.*?\(/, " ") // Get rid of "log.whatever(" .replaceFirst(/\);/, " ") // get rid of the closing ");" .replaceFirst("/\\*.*?\\*/", " ") // replace embedded comments "/*....*/" .replaceAll(/".*?"/, '""') // remove anything between quotes. This is a bit fragile if there are embedded double quotes. @@ -203,12 +73,9 @@ class ValidateLogCallsTask extends DefaultTask { .replaceAll(/TimeUnit\..*?\.convert\(.*?\)/, " ") // again, a pattern that's efficient .replaceAll("\\s", "") - def m = stripped =~ "\\(.*?\\)" - def hasParens = m.find() - // The compiler will pre-assemble patterns like 'log.info("string const1 {}" + " string const2 {}", obj1, obj2)' - // to log.info("string const1 {} string const2 {}", obj1, obj2)', so don't worry about any plus preceeded and - // followed by double quotes. + // to log.info("string const1 {} string const2 {}", obj1, obj2)', so don't worry about any plus preceded and + // followed by double quotes, otherwise flag it. def hasPlus = false for (int idx = 0; idx < stripped.length(); ++idx) { if (stripped.charAt(idx) == '+') { @@ -219,73 +86,87 @@ class ValidateLogCallsTask extends DefaultTask { } } } - // Check that previous line isn't an if statement for always-reported log levels. 
Arbitrary decision: we don't - // really care about checking for awkward constructions for WARN and above, so report a violation if the previous - // line contains an if for those levels. + boolean violation = false + //TODO remove me + String cause = "" + // Get the level of this log message. + // We don't really care about checking for method calls for WARN and more severe; the presumption + // is that they _should_ always be reported. - boolean dontReportLevels = level.equals("fatal") || level.equals("error") || level.equals("warn") + def level = "" + def lev = (line =~ "log\\.(.*?)\\(") + if (lev.find()) { + level = lev.group(1).toLowerCase().trim() + } + if (logLevels.contains("log." + level) == false) { + reportViolation(String.format("Found an unexpected log level: %s, file: %s:%d" + , level, file.getAbsolutePath(), lineNumber)) + } + boolean persnicketyLevel = !(level.equals("fatal") || level.equals("error") || level.equals("warn")) + // Check that previous line isn't an if statement for always-reported log levels. // There's a convention to declare a member variable for whether a level is enabled and check that rather than // isDebugEnabled so we need to check both. String prevLine = previous.replaceAll("\\s+", "").toLowerCase() boolean prevLineNotIf = ((prevLine.contains("if(log.is" + level + "enabled") == false && prevLine.contains("if(" + level + ")") == false)) - if (dontReportLevels) { - // Only look (optionally) for plusses if surrounded by an if isLevelEnabled clause - // Otherwise, we're always OK with logging the message. - if (hasPlus && checkPlus) { - violation = true - } - } else { // less severe than warn, check for parens and plusses and correct - if (hasParens && prevLineNotIf) { - violation = true - } - if (hasPlus && checkPlus) { - violation = true - } - if (hasPlus && prevLineNotIf) { + if (hasPlus) { + cause = "hasPlus: " + hasPlus + violation = true + } + if (violation == false) { + def m = stripped =~ "\\(.*?\\)" + def hasParens = m.find() + + if (hasParens && prevLineNotIf && persnicketyLevel) { + cause = "hasParens " + hasParens + " prevLineNotIf " + prevLineNotIf + " persnicketyLevel " + persnicketyLevel violation = true } } + // Always report toString(). Note, this over-reports some constructs // but just add //logOK if it's really OK. - if (line.contains("toString(") == true) { - if (line.replaceAll(/Arrays.toString\(/, "").contains("toString(") && prevLineNotIf) { + if (violation == false) { + if (line.contains("toString(") == true && prevLineNotIf) { + cause = "Line contains toString" violation = true } } + if (violation) { - reportViolation(String.format("Suspicious logging call, Parameterize and possibly surround with 'if (log.is*Enabled) {..}'. Help at: 'gradlew helpValidateLogCalls' %s %s:%d" , System.lineSeparator, file.getAbsolutePath(), lineNumber)) + reportViolation(String.format("cause: '%s' Suspicious logging call, Parameterize and possibly surround with 'if (log.is*Enabled) {..}'. Help at: 'gradlew helpValidateLogCalls' %s %s:%d" , cause , System.lineSeparator, file.getAbsolutePath(), lineNumber)) } return } - // Require all our logger definitions lower case "log", except a couple of special ones. def checkLogName(File file, String line) { // It's many times faster to do check this way than use a regex if (line.contains("static ") && line.contains("getLogger") && line.contains(" log ") == false) { - switch (file.name) { - case "LoggerFactory.java": break - case "SolrCore.java": // Except for two know log files with a different name.
- if (line.contains("requestLog") || line.contains("slowLog")) { - break - } - case "StartupLoggingUtils.java": - if (line.contains("getLoggerImplStr")) { - break; - } - default: - reportViolation("Change the logger name to lower-case 'log' in " + file.name + " " + line) - break; + String name = file.name + if (name.equals("LoggerFactory.java")) { + return } + if (name.equals("SolrCore.java") && (line.contains("requestLog") || line.contains("slowLog"))) { + return + } + if (name.equals("StartupLoggingUtils.java") && line.contains("getLoggerImplStr")) { + return + } + // Apparently the Hadoop code expectes upper-case LOG, so... + + if ((name.equals("HttpServer2.java") || name.equals("BlockPoolSlice.java") || name.equals("FileUtil.java")) + && line.contains(" LOG ")) { + return + } + + reportViolation("Change the logger name to lower-case 'log' in " + file.name + " " + line + " project" + project) } } def checkFile(File file) { - int state = 0 // 0 == not collecting a log line, 1 == collecting a log line, 2 == just collected the last. + int state = 0 int lineNumber = 0 StringBuilder sb = new StringBuilder(); @@ -296,7 +177,7 @@ class ValidateLogCallsTask extends DefaultTask { lineNumber++ checkLogName(file, line) switch (state) { - case 0: + case 0: // Not collecting a logging line. logLevels.each { if (line.contains(it)) { if (line.contains(");")) { @@ -310,8 +191,11 @@ class ValidateLogCallsTask extends DefaultTask { } break - case 1: // collecting - if (line.contains(");")) { + case 1: // collecting a log line + if (line.replaceFirst("//.*", " ") + .replaceFirst("/\\*.*?\\*/", " ") // replace embedded comments "/*....*/" + .replaceAll(/".*?"/, '""') // remove anything between quotes. This is a bit fragile if there are embedded double quotes. + .trim().endsWith(");")) { state = 2 } sb.append(line) @@ -322,12 +206,12 @@ class ValidateLogCallsTask extends DefaultTask { break } switch (state) { // It's just easier to do this here rather than read another line in the switch above. - case 0: + case 0: // Not collcting a log line prevLine = line.toLowerCase(); break; - case 1: + case 1: // collecting a logging line. break; - case 2: + case 2: // We've collected the complete log line. checkLine(file, sb.toString(), lineNumber, prevLine) state = 0 break; @@ -340,40 +224,10 @@ class ValidateLogCallsTask extends DefaultTask { @TaskAction def checkLogLines() { -// println srcDir - dirsToCheck.addAll(srcDir) - //TODO. This is here to check 8x on another branch since I can't run Gradle - // over 8x. Used periodically as a sanity check. -// new File("/Users/Erick/apache/solrJiras/master").traverse(type: groovy.io.FileType.FILES, nameFilter: ~/.*\.java/) { File it -> -// if (dirsToCheck.any { dir -> -// it.getCanonicalPath().contains(dir) -// }) { -// if (checkFile(it)) { -// println(it.getAbsolutePath()) -// // TODO. This just makes it much easier to get to the files during this mass migration! 
-// } -// } - -// new File("/Users/Erick/apache/SolrEOEFork").traverse(type: groovy.io.FileType.FILES, nameFilter: ~/.*\.java/) { File it -> -// if (checkFile(it)) { -// println(it.getAbsolutePath()) -// } -// } -// -//TODO -// println project - - // This is the real stuff project.sourceSets.each { srcSet -> srcSet.java.each { f -> - if (srcDir.contains("all")) { // TODO - checkFile(f) - } else if (dirsToCheck.any { - f.getCanonicalPath().contains(it) - }) { - checkFile(f) - } + checkFile(f) } } diff --git a/help/validateLogCalls.txt b/help/validateLogCalls.txt index 17291b827db..dc1777fd5a9 100644 --- a/help/validateLogCalls.txt +++ b/help/validateLogCalls.txt @@ -42,9 +42,8 @@ NOTES: we can use lambdas rather than "if log.is*Enabled". slf4j 2.0 will when released. -- warn, error, and fatal level messages are NOT flagged. However, if you want to - check these levels for including '+', specify '-PcheckPlus=true'. This is more - a style than functional check. +- warn, error, and fatal level messages are NOT flagged for parens, but will + be flagged for containg plusses that aren't simple concatenation. - You can get into some pretty convolued consructs trying to pass some of these checks. Adding //logok, with or without spaces will cause the line to pass @@ -57,9 +56,3 @@ NOTES: For a fuller discussion, see LUCENE-7788 and the other JIRAs linked from there. - -Until we get all the calls cleaned up, you MUST specify -PsrcDir=relative_path, - e.g. '-PsrcDir=solr/core/src/java/org/apache/solr/core'. This task will - recursively check all '*.java. files under that directory. Actually, it - just checks any file whose AbsolutePath contains the specification. May be - comma-delimited. diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java b/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java index ab967a8d149..bca88f18632 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/app/AbstractHandler.java @@ -33,7 +33,9 @@ public abstract class AbstractHandler { public void addObserver(T observer) { observers.add(observer); - log.debug("{} registered.", observer.getClass().getName()); + if (log.isDebugEnabled()) { + log.debug("{} registered.", observer.getClass().getName()); + } } void notifyObservers() { diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java index 0bbeb3eb6f5..1b9508f79eb 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/documents/AddDocumentDialogFactory.java @@ -365,7 +365,7 @@ public final class AddDocumentDialogFactory implements DialogOpener.DialogFactor } addDocument(doc); - log.info("Added document: {}", doc.toString()); + log.info("Added document: {}", doc); } @SuppressWarnings("unchecked") diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java index 471094223c1..c091bea5c47 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java +++ 
b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/ExportTermsDialogFactory.java @@ -248,10 +248,10 @@ public final class ExportTermsDialogFactory implements DialogOpener.DialogFactor try { filename = toolsModel.exportTerms(directory, field, selectedDelimiter); } catch (LukeException e) { - log.error("Error while exporting terms from field " + field, e); + log.error("Error while exporting terms from field {}", field, e); statusLbl.setText(MessageUtils.getLocalizedMessage("export.terms.label.error", e.getMessage())); } catch (Exception e) { - log.error("Error while exporting terms from field " + field, e); + log.error("Error while exporting terms from field {}", field, e); statusLbl.setText(MessageUtils.getLocalizedMessage("message.error.unknown")); throw e; } finally { diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java index e4b25296fb4..d2f6c9b5f86 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/models/documents/DocumentsImpl.java @@ -245,7 +245,9 @@ public final class DocumentsImpl extends LukeModel implements Documents { if (penum.nextDoc() == PostingsEnum.NO_MORE_DOCS) { // end of the iterator resetPostingsIterator(); - log.info("Reached the end of the postings iterator for term: {} in field: {}", BytesRefUtils.decode(tenum.term()), curField); + if (log.isInfoEnabled()) { + log.info("Reached the end of the postings iterator for term: {} in field: {}", BytesRefUtils.decode(tenum.term()), curField); + } return Optional.empty(); } else { return Optional.of(penum.docID()); diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java b/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java index 71e8070af46..9b98e92b9ed 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/models/util/IndexUtils.java @@ -93,7 +93,9 @@ public final class IndexUtils { throw new RuntimeException("No valid directory at the location: " + indexPath); } - log.info(String.format(Locale.ENGLISH, "IndexReaders (%d leaf readers) successfully opened. Index path=%s", readers.size(), indexPath)); + if (log.isInfoEnabled()) { + log.info(String.format(Locale.ENGLISH, "IndexReaders (%d leaf readers) successfully opened. Index path=%s", readers.size(), indexPath)); + } if (readers.size() == 1) { return readers.get(0); @@ -115,7 +117,9 @@ public final class IndexUtils { public static Directory openDirectory(String dirPath, String dirImpl) throws IOException { final Path path = FileSystems.getDefault().getPath(Objects.requireNonNull(dirPath)); Directory dir = openDirectory(path, dirImpl); - log.info(String.format(Locale.ENGLISH, "DirectoryReader successfully opened. Directory path=%s", dirPath)); + if (log.isInfoEnabled()) { + log.info(String.format(Locale.ENGLISH, "DirectoryReader successfully opened. 
Directory path=%s", dirPath)); + } return dir; } diff --git a/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java b/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java index 5a2fe739849..a11997450d1 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/models/util/twentynewsgroups/MessageFilesParser.java @@ -54,7 +54,7 @@ public class MessageFilesParser extends SimpleFileVisitor { } } } catch (IOException e) { - log.warn("Invalid file? " + file.toString()); + log.warn("Invalid file? {}", file); } return FileVisitResult.CONTINUE; } diff --git a/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java b/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java index 4f22a871434..de40a187eef 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/util/reflection/SubtypeCollector.java @@ -84,7 +84,7 @@ final class SubtypeCollector implements Runnable { } } } catch (IOException e) { - log.error("Cannot load jar " + url.toString(), e); + log.error("Cannot load jar {}", url, e); } } } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java index 199ebcf01a7..2fc68b44242 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java @@ -67,7 +67,7 @@ public abstract class StrategyTestCase extends SpatialTestCase { protected boolean storeShape = true; protected void executeQueries(SpatialMatchConcern concern, String... 
testQueryFile) throws IOException { - log.info("testing queried for strategy "+strategy); + log.info("testing queried for strategy "+strategy); // logOk for( String path : testQueryFile ) { Iterator testQueryIterator = getTestQueries(path, ctx); runTestQueries(testQueryIterator, concern); diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java index a38f5b6e7a7..aeb412a12b5 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java @@ -64,7 +64,7 @@ public class HeatmapFacetCounterTest extends StrategyTestCase { @After public void after() { - log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); + log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); // logOK } @Test diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java index c5b145f97c9..97f3b382c38 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java @@ -87,7 +87,7 @@ public class RandomSpatialOpFuzzyPrefixTreeTest extends StrategyTestCase { ((PrefixTreeStrategy) strategy).setPointsOnly(true); } - log.info("Strategy: " + strategy.toString()); + log.info("Strategy: " + strategy.toString()); // logOk } private void setupCtx2D(SpatialContext ctx) { diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java index 5ff6a6dc62d..1c2b471a281 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/ClusteringComponent.java @@ -179,7 +179,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar if (!engine.isAvailable()) { if (optional) { - log.info("Optional clustering engine not available: " + name); + log.info("Optional clustering engine not available: {}", name); } else { throw new SolrException(ErrorCode.SERVER_ERROR, "A required clustering engine failed to initialize, check the logs: " + name); @@ -192,11 +192,11 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar } else if (engine instanceof DocumentClusteringEngine) { previousEntry = documentClusteringEngines.put(name, (DocumentClusteringEngine) engine); } else { - log.warn("Unknown type of a clustering engine for class: " + engineClassName); + log.warn("Unknown type of a clustering engine for class: {}", engineClassName); continue; } if (previousEntry != null) { - log.warn("Duplicate clustering engine component named '" + name + "'."); + log.warn("Duplicate clustering engine component named '{}'.", name); } } } @@ -237,7 +237,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar Object clusters = engine.cluster(rb.getQuery(), solrDocList, docIds, rb.req); rb.rsp.add("clusters", clusters); } else { - log.warn("No engine named: " + name); + 
log.warn("No engine named: {}", name); } } @@ -257,7 +257,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar } rb.rsp.add("clusters", nl); } else { - log.warn("No engine named: " + name); + log.warn("No engine named: {}", name); } } } @@ -312,7 +312,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar sreq.params.set(CommonParams.FL, fl + sb.toString()); } } else { - log.warn("No engine named: " + name); + log.warn("No engine named: {}", name); } } } @@ -342,7 +342,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar Object clusters = engine.cluster(rb.getQuery(), solrDocList, docIds, rb.req); rb.rsp.add("clusters", clusters); } else { - log.warn("No engine named: " + name); + log.warn("No engine named: {}", name); } } } @@ -383,9 +383,11 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar } if (defaultEngine != null) { - log.info("Default engine for " + type + ": " + engineName + " [" + defaultEngine.getClass().getSimpleName() + "]"); + if (log.isInfoEnabled()) { + log.info("Default engine for {}: {} [{}]", type, engineName, defaultEngine.getClass().getSimpleName()); + } } else { - log.warn("No default engine for " + type + "."); + log.warn("No default engine for {}.", type); } } } diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java index b75b8c6deed..0e4628d71cf 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java @@ -153,15 +153,16 @@ public class CarrotClusteringEngine extends SearchClusteringEngine { // Load Carrot2-Workbench exported attribute XMLs based on the 'name' attribute // of this component. This by-name convention lookup is used to simplify configuring algorithms. String componentName = initParams.get(ClusteringEngine.ENGINE_NAME); - log.info("Initializing Clustering Engine '" + - MoreObjects.firstNonNull(componentName, "") + "'"); + if (log.isInfoEnabled()) { + log.info("Initializing Clustering Engine '{}'", MoreObjects.firstNonNull(componentName, "")); + } if (!Strings.isNullOrEmpty(componentName)) { IResource[] attributeXmls = resourceLookup.getAll(componentName + "-attributes.xml"); if (attributeXmls.length > 0) { if (attributeXmls.length > 1) { - log.warn("More than one attribute file found, first one will be used: " - + Arrays.toString(attributeXmls)); + log.warn("More than one attribute file found, first one will be used: {}" + , Arrays.toString(attributeXmls)); // logOk } withContextClassLoader(core.getResourceLoader().getClassLoader(), () -> { @@ -308,8 +309,8 @@ public class CarrotClusteringEngine extends SearchClusteringEngine { if (split.length == 2 && StringUtils.isNotBlank(split[0]) && StringUtils.isNotBlank(split[1])) { languageCodeMap.put(split[0], split[1]); } else { - log.warn("Unsupported format for " + CarrotParams.LANGUAGE_CODE_MAP - + ": '" + pair + "'. Skipping this mapping."); + log.warn("Unsupported format for {}: '{}'. Skipping this mapping." 
+ , CarrotParams.LANGUAGE_CODE_MAP, pair); } } } @@ -457,8 +458,8 @@ public class CarrotClusteringEngine extends SearchClusteringEngine { if (split.length == 2 && StringUtils.isNotBlank(split[0]) && StringUtils.isNotBlank(split[1])) { customFields.put(split[0], split[1]); } else { - log.warn("Unsupported format for " + CarrotParams.CUSTOM_FIELD_NAME - + ": '" + customFieldSpec + "'. Skipping this field definition."); + log.warn("Unsupported format for {}: '{}'. Skipping this field definition." + , CarrotParams.CUSTOM_FIELD_NAME, customFieldSpec); } } } diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java index 584757c7cbc..ae03c3d9960 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/LuceneCarrot2StemmerFactory.java @@ -137,17 +137,17 @@ public class LuceneCarrot2StemmerFactory implements IStemmerFactory { .get(language); if (stemmerClazz == null) { - log.warn("No Snowball stemmer class for: " + language.name() - + ". Quality of clustering may be degraded."); + log.warn("No Snowball stemmer class for: {}. " + + "Quality of clustering may be degraded.", language.name()); return IdentityStemmer.INSTANCE; } try { return new SnowballStemmerAdapter(stemmerClazz.getConstructor().newInstance()); } catch (Exception e) { - log.warn("Could not instantiate snowball stemmer" - + " for language: " + language.name() - + ". Quality of clustering may be degraded.", e); + log.warn("Could not instantiate snowball stemmer for language: {}" + + ". Quality of clustering may be degraded." + , language.name(), e); return IdentityStemmer.INSTANCE; } diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java index 2cc67993a2e..4c0979929e9 100644 --- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java +++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/SolrResourceLocator.java @@ -60,7 +60,7 @@ class SolrResourceLocator implements IResourceLocator { @Override public IResource[] getAll(final String resource) { final String resourceName = carrot2ResourcesDir + "/" + resource; - log.debug("Looking for Solr resource: " + resourceName); + log.debug("Looking for Solr resource: {}", resourceName); InputStream resourceStream = null; final byte [] asBytes; @@ -68,8 +68,8 @@ class SolrResourceLocator implements IResourceLocator { resourceStream = resourceLoader.openResource(resourceName); asBytes = IOUtils.toByteArray(resourceStream); } catch (IOException e) { - log.debug("Resource not found in Solr's config: " + resourceName - + ". Using the default " + resource + " from Carrot JAR."); + log.debug("Resource not found in Solr's config: {}. Using the default {} from Carrot JAR." 
+ , resourceName, resource); return new IResource[] {}; } finally { if (resourceStream != null) { @@ -81,7 +81,7 @@ class SolrResourceLocator implements IResourceLocator { } } - log.info("Loaded Solr resource: " + resourceName); + log.info("Loaded Solr resource: {}", resourceName); final IResource foundResource = new IResource() { @Override diff --git a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java index f19a4ca9ffd..d72f5fc0924 100644 --- a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java +++ b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java @@ -112,7 +112,7 @@ public class MailEntityProcessor extends EntityProcessorBase { String varName = ConfigNameConstants.IMPORTER_NS_SHORT + "." + cname + "." + DocBuilder.LAST_INDEX_TIME; Object varValue = context.getVariableResolver().resolve(varName); - log.info(varName+"="+varValue); + log.info("{}={}", varName, varValue); if (varValue != null && !"".equals(varValue) && !"".equals(getStringFromContext("fetchMailsSince", ""))) { @@ -123,21 +123,21 @@ public class MailEntityProcessor extends EntityProcessorBase { try { tmp = sinceDateParser.parse((String)varValue); if (tmp.getTime() == 0) { - log.info("Ignoring initial value "+varValue+" for "+varName+ - " in favor of fetchMailsSince config parameter"); + log.info("Ignoring initial value {} for {} in favor of fetchMailsSince config parameter" + , varValue, varName); tmp = null; // don't use this value } } catch (ParseException e) { // probably ok to ignore this since we have other options below // as we're just trying to figure out if the date is 0 - log.warn("Failed to parse "+varValue+" from "+varName+" due to: "+e); + log.warn("Failed to parse {} from {} due to", varValue, varName, e); } if (tmp == null) { // favor fetchMailsSince in this case because the value from // dataimport.properties is the default/init value varValue = getStringFromContext("fetchMailsSince", ""); - log.info("fetchMailsSince="+varValue); + log.info("fetchMailsSince={}", varValue); } } @@ -145,7 +145,7 @@ public class MailEntityProcessor extends EntityProcessorBase { varName = ConfigNameConstants.IMPORTER_NS_SHORT + "." 
+ DocBuilder.LAST_INDEX_TIME; varValue = context.getVariableResolver().resolve(varName); - log.info(varName+"="+varValue); + log.info("{}={}", varName, varValue); } if (varValue != null && varValue instanceof String) { @@ -157,13 +157,13 @@ if (lastIndexTime == null) lastIndexTime = getStringFromContext("fetchMailsSince", ""); - log.info("Using lastIndexTime "+lastIndexTime+" for mail import"); + log.info("Using lastIndexTime {} for mail import", lastIndexTime); this.fetchMailsSince = null; if (lastIndexTime != null && lastIndexTime.length() > 0) { try { fetchMailsSince = sinceDateParser.parse(lastIndexTime); - log.info("Parsed fetchMailsSince=" + lastIndexTime); + log.info("Parsed fetchMailsSince={}", lastIndexTime); } catch (ParseException e) { throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Invalid value for fetchMailSince: " + lastIndexTime, e); @@ -247,8 +247,8 @@ public class MailEntityProcessor extends EntityProcessorBase { addPartToDocument(mail, row, true); return row; } catch (Exception e) { - log.error("Failed to convert message [" + mail.toString() - + "] to document due to: " + e, e); + log.error("Failed to convert message [{}] to document due to: {}" + , mail, e, e); return null; } } @@ -269,9 +269,9 @@ for (int i = 0; i < count; i++) addPartToDocument(mp.getBodyPart(i), row, false); } else { - log.warn("Multipart content is a not an instance of Multipart! Content is: " - + (content != null ? content.getClass().getName() : "null") - + ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader."); + log.warn("Multipart content is not an instance of Multipart! Content is: {}" + + ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader." + , (content != null ?
content.getClass().getName() : "null")); } } else if (part.isMimeType("message/rfc822")) { addPartToDocument((Part) part.getContent(), row, false); @@ -374,8 +374,8 @@ public class MailEntityProcessor extends EntityProcessorBase { if (("imap".equals(protocol) || "imaps".equals(protocol)) && "imap.gmail.com".equals(host)) { - log.info("Consider using 'gimaps' protocol instead of '" + protocol - + "' for enabling GMail specific extensions for " + host); + log.info("Consider using 'gimaps' protocol instead of '{}' for enabling GMail specific extensions for {}" + , protocol, host); } props.setProperty("mail.store.protocol", protocol); @@ -399,7 +399,7 @@ public class MailEntityProcessor extends EntityProcessorBase { } else { mailbox.connect(host, user, password); } - log.info("Connected to " + user + "'s mailbox on " + host); + log.info("Connected to {}'s mailbox on {}", user, host); return true; } catch (MessagingException e) { @@ -474,7 +474,7 @@ public class MailEntityProcessor extends EntityProcessorBase { .append(lineSep); config.append("includeSharedFolders : ").append(includeSharedFolders) .append(lineSep); - log.info(config.toString()); + log.info("{}", config); } class FolderIterator implements Iterator { @@ -515,14 +515,16 @@ public class MailEntityProcessor extends EntityProcessorBase { hasMessages = (next.getType() & Folder.HOLDS_MESSAGES) != 0; next.open(Folder.READ_ONLY); lastFolder = next; - log.info("Opened folder : " + fullName); + log.info("Opened folder : {}", fullName); } if (recurse && ((next.getType() & Folder.HOLDS_FOLDERS) != 0)) { Folder[] children = next.list(); log.info("Added its children to list : "); for (int i = children.length - 1; i >= 0; i--) { folders.add(0, children[i]); - log.info("child name : " + children[i].getFullName()); + if (log.isInfoEnabled()) { + log.info("child name : {}", children[i].getFullName()); + } } if (children.length == 0) log.info("NO children : "); } @@ -530,7 +532,7 @@ public class MailEntityProcessor extends EntityProcessorBase { } while (!hasMessages); return next; } catch (Exception e) { - log.warn("Failed to read folders due to: "+e); + log.warn("Failed to read folders due to: {}", e); // throw new // DataImportHandlerException(DataImportHandlerException.SEVERE, // "Folder open failed", e); @@ -568,13 +570,13 @@ public class MailEntityProcessor extends EntityProcessorBase { try { Folder[] ufldrs = mailbox.getUserNamespaces(null); if (ufldrs != null) { - log.info("Found " + ufldrs.length + " user namespace folders"); + log.info("Found {} user namespace folders", ufldrs.length); for (Folder ufldr : ufldrs) folders.add(ufldr); } } catch (MessagingException me) { - log.warn("Messaging exception retrieving user namespaces: " - + me.getMessage()); + log.warn("Messaging exception retrieving user namespaces: {}" + , me.getMessage()); } } @@ -582,13 +584,13 @@ public class MailEntityProcessor extends EntityProcessorBase { try { Folder[] sfldrs = mailbox.getSharedNamespaces(); if (sfldrs != null) { - log.info("Found " + sfldrs.length + " shared namespace folders"); + log.info("Found {} shared namespace folders", sfldrs.length); for (Folder sfldr : sfldrs) folders.add(sfldr); } } catch (MessagingException me) { - log.warn("Messaging exception retrieving shared namespaces: " - + me.getMessage()); + log.warn("Messaging exception retrieving shared namespaces: {}" + , me.getMessage()); } } @@ -620,14 +622,16 @@ public class MailEntityProcessor extends EntityProcessorBase { this.batchSize = batchSize; SearchTerm st = getSearchTerm(); - 
log.info("SearchTerm=" + st); + log.info("SearchTerm={}", st); if (st != null || folder instanceof GmailFolder) { doBatching = false; // Searching can still take a while even though we're only pulling // envelopes; unless you're using gmail server-side filter, which is // fast - log.info("Searching folder " + folder.getName() + " for messages"); + if (log.isInfoEnabled()) { + log.info("Searching folder {} for messages", folder.getName()); + } final RTimer searchTimer = new RTimer(); // If using GMail, speed up the envelope processing by doing a @@ -642,12 +646,14 @@ public class MailEntityProcessor extends EntityProcessorBase { if (folder instanceof GmailFolder && fetchMailsSince != null) { String afterCrit = "after:" + afterFmt.format(fetchMailsSince); - log.info("Added server-side gmail filter: " + afterCrit); + log.info("Added server-side gmail filter: {}", afterCrit); Message[] afterMessages = folder.search(new GmailRawSearchTerm( afterCrit)); - - log.info("GMail server-side filter found " + afterMessages.length - + " messages received " + afterCrit + " in folder " + folder.getName()); + + if (log.isInfoEnabled()) { + log.info("GMail server-side filter found {} messages received {} in folder " + , afterMessages.length, afterCrit, folder.getName()); + } // now pass in the server-side filtered messages to the local filter messagesInCurBatch = folder.search((st != null ? st : this), afterMessages); @@ -657,11 +663,13 @@ public class MailEntityProcessor extends EntityProcessorBase { totalInFolder = messagesInCurBatch.length; folder.fetch(messagesInCurBatch, fp); current = 0; - log.info("Total messages : " + totalInFolder); - log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime()); + if (log.isInfoEnabled()) { + log.info("Total messages : {}", totalInFolder); + log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime()); // logOk + } } else { totalInFolder = folder.getMessageCount(); - log.info("Total messages : " + totalInFolder); + log.info("Total messages : {}", totalInFolder); getNextBatch(batchSize, folder); } } catch (MessagingException e) { @@ -685,8 +693,8 @@ public class MailEntityProcessor extends EntityProcessorBase { folder.fetch(messagesInCurBatch, fp); current = 0; currentBatch++; - log.info("Current Batch : " + currentBatch); - log.info("Messages in this batch : " + messagesInCurBatch.length); + log.info("Current Batch : {}", currentBatch); + log.info("Messages in this batch : {}", messagesInCurBatch.length); } public boolean hasNext() { @@ -741,8 +749,10 @@ public class MailEntityProcessor extends EntityProcessorBase { @SuppressWarnings("serial") public SearchTerm getCustomSearch(final Folder folder) { - log.info("Building mail filter for messages in " + folder.getName() - + " that occur after " + sinceDateParser.format(since)); + if (log.isInfoEnabled()) { + log.info("Building mail filter for messages in {} that occur after {}" + , folder.getName(), sinceDateParser.format(since)); + } return new DateTerm(ComparisonTerm.GE, since) { private int matched = 0; private int seen = 0; @@ -761,16 +771,20 @@ public class MailEntityProcessor extends EntityProcessorBase { } else { String msgDateStr = (msgDate != null) ? sinceDateParser.format(msgDate) : "null"; String sinceDateStr = (since != null) ? 
sinceDateParser.format(since) : "null"; - log.debug("Message " + msg.getSubject() + " was received at [" + msgDateStr - + "], since filter is [" + sinceDateStr + "]"); + if (log.isDebugEnabled()) { + log.debug("Message {} was received at [{}], since filter is [{}]" + , msg.getSubject(), msgDateStr, sinceDateStr); + } } } catch (MessagingException e) { - log.warn("Failed to process message due to: "+e, e); + log.warn("Failed to process message due to: {}", e, e); } if (seen % 100 == 0) { - log.info("Matched " + matched + " of " + seen + " messages since: " - + sinceDateParser.format(since)); + if (log.isInfoEnabled()) { + log.info("Matched {} of {} messages since: {}" + , matched, seen, sinceDateParser.format(since)); + } } return isMatch; diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java index c1b4808eee7..03a30ab07a9 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java @@ -61,14 +61,14 @@ public class BinURLDataSource extends DataSource{ try { connectionTimeout = Integer.parseInt(cTimeout); } catch (NumberFormatException e) { - log.warn("Invalid connection timeout: " + cTimeout); + log.warn("Invalid connection timeout: {}", cTimeout); } } if (rTimeout != null) { try { readTimeout = Integer.parseInt(rTimeout); } catch (NumberFormatException e) { - log.warn("Invalid read timeout: " + rTimeout); + log.warn("Invalid read timeout: {}", rTimeout); } } } @@ -79,7 +79,7 @@ public class BinURLDataSource extends DataSource{ try { if (URIMETHOD.matcher(query).find()) url = new URL(query); else url = new URL(baseUrl + query); - log.debug("Accessing URL: " + url.toString()); + log.debug("Accessing URL: {}", url); URLConnection conn = url.openConnection(); conn.setConnectTimeout(connectionTimeout); conn.setReadTimeout(readTimeout); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java index c48c332616f..94b28481efb 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java @@ -266,7 +266,7 @@ public class DataImportHandler extends RequestHandlerBase implements try { return super.upload(document); } catch (RuntimeException e) { - log.error("Exception while adding: " + document, e); + log.error("Exception while adding: {}", document, e); return false; } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java index 932849bbd9b..07ab9bcf51b 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java @@ -125,7 +125,7 @@ public class DataImporter { } else if(dataconfigFile!=null) { is = new InputSource(core.getResourceLoader().openResource(dataconfigFile)); is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile)); - log.info("Loading DIH Configuration: " + dataconfigFile); + 
log.info("Loading DIH Configuration: {}", dataconfigFile); } if(is!=null) { config = loadDataConfig(is); @@ -148,7 +148,7 @@ public class DataImporter { for (int i = 0; i < dsConfig.size(); i++) { props.put(dsConfig.getName(i), dsConfig.getVal(i).toString()); } - log.info("Adding properties to datasource: " + props); + log.info("Adding properties to datasource: {}", props); dsProps.put((String) dsConfig.get("name"), props); } position++; diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java index 43e3af70795..8db73ca1909 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java @@ -265,7 +265,9 @@ public class DocBuilder { statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get()); statusMessages.put("Time taken", getTimeElapsedSince(startTime.get())); - log.info("Time taken = " + getTimeElapsedSince(startTime.get())); + if (log.isInfoEnabled()) { + log.info("Time taken = {}", getTimeElapsedSince(startTime.get())); + } } catch(Exception e) { throw new RuntimeException(e); @@ -385,7 +387,7 @@ public class DocBuilder { key = map.get(keyName); } if(key == null) { - log.warn("no key was available for deleted pk query. keyName = " + keyName); + log.warn("no key was available for deleted pk query. keyName = {}", keyName); continue; } writer.deleteDoc(key); @@ -483,7 +485,7 @@ public class DocBuilder { if (seenDocCount <= reqParams.getStart()) continue; if (seenDocCount > reqParams.getStart() + reqParams.getRows()) { - log.info("Indexing stopped at docCount = " + importStatistics.docCount); + log.info("Indexing stopped at docCount = {}", importStatistics.docCount); break; } } @@ -759,9 +761,11 @@ public class DocBuilder { "deltaQuery has no column to resolve to declared primary key pk='%s'", pk)); } - log.info(String.format(Locale.ROOT, - "Resolving deltaQuery column '%s' to match entity's declared pk '%s'", - resolvedPk, pk)); + if (log.isInfoEnabled()) { + log.info(String.format(Locale.ROOT, + "Resolving deltaQuery column '%s' to match entity's declared pk '%s'", + resolvedPk, pk)); + } return resolvedPk; } @@ -796,7 +800,9 @@ public class DocBuilder { // identifying the modified rows for this entity Map> deltaSet = new HashMap<>(); - log.info("Running ModifiedRowKey() for Entity: " + epw.getEntity().getName()); + if (log.isInfoEnabled()) { + log.info("Running ModifiedRowKey() for Entity: {}", epw.getEntity().getName()); + } //get the modified rows in this entity String pk = epw.getEntity().getPk(); while (true) { @@ -844,8 +850,10 @@ public class DocBuilder { return new HashSet(); } - log.info("Completed ModifiedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deltaSet.size()); - log.info("Completed DeletedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deletedSet.size()); + if (log.isInfoEnabled()) { + log.info("Completed ModifiedRowKey for Entity: {} rows obtained: {}", epw.getEntity().getName(), deltaSet.size()); + log.info("Completed DeletedRowKey for Entity: {} rows obtained : {}", epw.getEntity().getName(), deletedSet.size()); // logOk + } myModifiedPks.addAll(deltaSet.values()); Set> parentKeyList = new HashSet<>(); @@ -870,7 +878,9 @@ public class DocBuilder { return new HashSet(); } } - log.info("Completed 
parentDeltaQuery for Entity: " + epw.getEntity().getName()); + if (log.isInfoEnabled()) { + log.info("Completed parentDeltaQuery for Entity: {}", epw.getEntity().getName()); + } if (epw.getEntity().isDocRoot()) deletedRows.addAll(deletedSet); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java index 984f0303e33..f63bfbd528f 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorWrapper.java @@ -124,7 +124,7 @@ public class EntityProcessorWrapper extends EntityProcessor { log.error(msg); wrapAndThrow(SEVERE, nsme,msg); } catch (Exception e) { - log.error("Unable to load Transformer: " + aTransArr, e); + log.error("Unable to load Transformer: {}", aTransArr, e); wrapAndThrow(SEVERE, e,"Unable to load Transformer: " + trans); } } @@ -172,7 +172,7 @@ public class EntityProcessorWrapper extends EntityProcessor { try { return meth.invoke(o, aRow); } catch (Exception e) { - log.warn("method invocation failed on transformer : " + trans, e); + log.warn("method invocation failed on transformer : {}", trans, e); throw new DataImportHandlerException(WARN, e); } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java index 920472e2bfe..34df122687c 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java @@ -102,14 +102,14 @@ public class FileDataSource extends DataSource { File basePathFile; if (basePath == null) { basePathFile = new File(".").getAbsoluteFile(); - log.warn("FileDataSource.basePath is empty. " + - "Resolving to: " + basePathFile.getAbsolutePath()); + log.warn("FileDataSource.basePath is empty. Resolving to: {}" + , basePathFile.getAbsolutePath()); } else { basePathFile = new File(basePath); if (!basePathFile.isAbsolute()) { basePathFile = basePathFile.getAbsoluteFile(); - log.warn("FileDataSource.basePath is not absolute. Resolving to: " - + basePathFile.getAbsolutePath()); + log.warn("FileDataSource.basePath is not absolute. 
Resolving to: {}" + , basePathFile.getAbsolutePath()); } } @@ -117,7 +117,9 @@ public class FileDataSource extends DataSource { } if (file.isFile() && file.canRead()) { - log.debug("Accessing File: " + file.getAbsolutePath()); + if (log.isDebugEnabled()) { + log.debug("Accessing File: {}", file.getAbsolutePath()); + } return file; } else { throw new FileNotFoundException("Could not find file: " + query + diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java index 5eb35172466..3b81d21eaf2 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java @@ -87,7 +87,7 @@ public class JdbcDataSource extends if (batchSize == -1) batchSize = Integer.MIN_VALUE; } catch (NumberFormatException e) { - log.warn("Invalid batch size: " + bsz); + log.warn("Invalid batch size: {}", bsz); } } @@ -172,9 +172,10 @@ public class JdbcDataSource extends return factory = new Callable() { @Override public Connection call() throws Exception { - log.info("Creating a connection for entity " - + context.getEntityAttribute(DataImporter.NAME) + " with URL: " - + url); + if (log.isInfoEnabled()) { + log.info("Creating a connection for entity {} with URL: {}" + , context.getEntityAttribute(DataImporter.NAME), url); + } long start = System.nanoTime(); Connection c = null; @@ -205,8 +206,8 @@ public class JdbcDataSource extends throw new DataImportHandlerException(SEVERE, "Exception initializing SQL connection", e); } } - log.info("Time taken for getConnection(): " - + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); + log.info("Time taken for getConnection(): {}" + , TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); return c; } @@ -316,11 +317,11 @@ public class JdbcDataSource extends try { Connection c = getConnection(); stmt = createStatement(c, batchSize, maxRows); - log.debug("Executing SQL: " + query); + log.debug("Executing SQL: {}", query); long start = System.nanoTime(); resultSet = executeStatement(stmt, query); - log.trace("Time taken for sql :" - + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); + log.trace("Time taken for sql : {}" + , TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)); setColNames(resultSet); } catch (Exception e) { close(); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java index 719decae906..0765bd67e94 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java @@ -165,7 +165,7 @@ public class RegexTransformer extends Transformer { } } } catch (Exception e) { - log.warn("Parsing failed for field : " + columnName, e); + log.warn("Parsing failed for field : {}", columnName, e); } } return l == null ? 
map: l; diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java index 2d5b078b87a..0b77c6ec6d3 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SimplePropertiesWriter.java @@ -214,7 +214,7 @@ public class SimplePropertiesWriter extends DIHProperties { existingProps.putAll(newProps); propOutput = new OutputStreamWriter(new FileOutputStream(getPersistFile()), StandardCharsets.UTF_8); existingProps.store(propOutput, null); - log.info("Wrote last indexed time to " + filename); + log.info("Wrote last indexed time to {}", filename); } catch (Exception e) { throw new DataImportHandlerException(DataImportHandlerException.SEVERE, "Unable to persist Index Start Time", e); @@ -235,9 +235,9 @@ public class SimplePropertiesWriter extends DIHProperties { filePath += filename; propInput = new FileInputStream(filePath); props.load(new InputStreamReader(propInput, StandardCharsets.UTF_8)); - log.info("Read " + filename); + log.info("Read {}", filename); } catch (Exception e) { - log.warn("Unable to read: " + filename); + log.warn("Unable to read: {}", filename); } finally { IOUtils.closeWhileHandlingException(propInput); } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java index 3964f3f2463..8e7624bd8b0 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java @@ -79,7 +79,7 @@ public class SolrWriter extends DIHWriterBase implements DIHWriter { command.commitWithin = commitWithin; processor.processAdd(command); } catch (Exception e) { - log.warn("Error creating document : " + d, e); + log.warn("Error creating document : {}", d, e); return false; } @@ -89,24 +89,24 @@ public class SolrWriter extends DIHWriterBase implements DIHWriter { @Override public void deleteDoc(Object id) { try { - log.info("Deleting document: " + id); + log.info("Deleting document: {}", id); DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req); delCmd.setId(id.toString()); processor.processDelete(delCmd); } catch (IOException e) { - log.error("Exception while deleteing: " + id, e); + log.error("Exception while deleting: {}", id, e); } } @Override public void deleteByQuery(String query) { try { - log.info("Deleting documents from Solr with query: " + query); + log.info("Deleting documents from Solr with query: {}", query); DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req); delCmd.query = query; processor.processDelete(delCmd); } catch (IOException e) { - log.error("Exception while deleting by query: " + query, e); + log.error("Exception while deleting by query: {}", query, e); } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java index 19c6d0f7476..8e0522a465b 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java +++ 
b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java @@ -61,7 +61,7 @@ public class SqlEntityProcessor extends EntityProcessorBase { } catch (DataImportHandlerException e) { throw e; } catch (Exception e) { - log.error( "The query failed '" + q + "'", e); + log.error( "The query failed '{}'", q, e); throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e); } } @@ -103,8 +103,10 @@ public class SqlEntityProcessor extends EntityProcessorBase { String parentDeltaQuery = context.getEntityAttribute(PARENT_DELTA_QUERY); if (parentDeltaQuery == null) return null; - log.info("Running parentDeltaQuery for Entity: " - + context.getEntityAttribute("name")); + if (log.isInfoEnabled()) { + log.info("Running parentDeltaQuery for Entity: {}" + , context.getEntityAttribute("name")); + } initQuery(context.replaceTokens(parentDeltaQuery)); } return getNext(); @@ -119,7 +121,7 @@ public class SqlEntityProcessor extends EntityProcessorBase { String deltaImportQuery = context.getEntityAttribute(DELTA_IMPORT_QUERY); if(deltaImportQuery != null) return deltaImportQuery; } - log.warn("'deltaImportQuery' attribute is not specified for entity : "+ entityName); + log.warn("'deltaImportQuery' attribute is not specified for entity : {}", entityName); return getDeltaImportQuery(queryString); } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java index f655edd17d8..3a20028bb48 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java @@ -76,8 +76,8 @@ public class TemplateTransformer extends Transformer { } for (String v : variables) { if (resolver.resolve(v) == null) { - log.warn("Unable to resolve variable: " + v - + " while parsing expression: " + expr); + log.warn("Unable to resolve variable: {} while parsing expression: {}" + , v, expr); resolvable = false; } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java index 145ffc47eb7..0beed255d16 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java @@ -72,14 +72,14 @@ public class URLDataSource extends DataSource { try { connectionTimeout = Integer.parseInt(cTimeout); } catch (NumberFormatException e) { - log.warn("Invalid connection timeout: " + cTimeout); + log.warn("Invalid connection timeout: {}", cTimeout); } } if (rTimeout != null) { try { readTimeout = Integer.parseInt(rTimeout); } catch (NumberFormatException e) { - log.warn("Invalid read timeout: " + rTimeout); + log.warn("Invalid read timeout: {}", rTimeout); } } } @@ -91,7 +91,7 @@ public class URLDataSource extends DataSource { if (URIMETHOD.matcher(query).find()) url = new URL(query); else url = new URL(baseUrl + query); - log.debug("Accessing URL: " + url.toString()); + log.debug("Accessing URL: {}", url); URLConnection conn = url.openConnection(); conn.setConnectTimeout(connectionTimeout); diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java 
b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java index c93b581fdfc..4c70d406b86 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java @@ -136,8 +136,9 @@ public class XPathEntityProcessor extends EntityProcessorBase { // some XML parsers are broken and don't close the byte stream (but they should according to spec) IOUtils.closeQuietly(xsltSource.getInputStream()); } - log.info("Using xslTransformer: " - + xslTransformer.getClass().getName()); + if (log.isInfoEnabled()) { + log.info("Using xslTransformer: {}", xslTransformer.getClass().getName()); + } } catch (Exception e) { throw new DataImportHandlerException(SEVERE, "Error initializing XSL ", e); @@ -293,10 +294,12 @@ public class XPathEntityProcessor extends EntityProcessorBase { if (ABORT.equals(onError)) { wrapAndThrow(SEVERE, e); } else if (SKIP.equals(onError)) { - if (log.isDebugEnabled()) log.debug("Skipping url : " + s, e); + if (log.isDebugEnabled()) { + log.debug("Skipping url : {}", s, e); + } wrapAndThrow(DataImportHandlerException.SKIP, e); } else { - log.warn("Failed for url : " + s, e); + log.warn("Failed for url : {}", s, e); rowIterator = Collections.EMPTY_LIST.iterator(); return; } @@ -313,7 +316,7 @@ public class XPathEntityProcessor extends EntityProcessorBase { } else if (SKIP.equals(onError)) { wrapAndThrow(DataImportHandlerException.SKIP, e); } else { - log.warn("Failed for url : " + s, e); + log.warn("Failed for url : {}", s, e); rowIterator = Collections.EMPTY_LIST.iterator(); return; } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java index 13daf49b085..1d546e9bc97 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java @@ -645,7 +645,7 @@ public class XPathRecordReader { } catch (IllegalArgumentException ex) { // Other implementations will likely throw this exception since "reuse-instance" // isimplementation specific. 
- log.debug("Unable to set the 'reuse-instance' property for the input chain: " + factory); + log.debug("Unable to set the 'reuse-instance' property for the input chain: {}", factory); } } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java index 64a776c9ccc..2d83202b3bb 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ZKPropertiesWriter.java @@ -74,7 +74,7 @@ public class ZKPropertiesWriter extends SimplePropertiesWriter { zkClient.setData(path, bytes, false); } catch (Exception e) { SolrZkClient.checkInterrupted(e); - log.warn("Could not persist properties to " + path + " :" + e.getClass(), e); + log.warn("Could not persist properties to {} : {}", path, e.getClass(), e); } } @@ -88,7 +88,7 @@ public class ZKPropertiesWriter extends SimplePropertiesWriter { } } catch (Exception e) { SolrZkClient.checkInterrupted(e); - log.warn("Could not read DIH properties from " + path + " :" + e.getClass(), e); + log.warn("Could not read DIH properties from {} : {}", path, e.getClass(), e); } return propertiesToMap(props); } diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java index 0ba13eac788..3832355a48c 100644 --- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java +++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java @@ -106,7 +106,9 @@ public class DIHConfiguration { SchemaField sf = entry.getValue(); if (!fields.containsKey(sf.getName())) { if (sf.isRequired()) { - log.info(sf.getName() + " is a required field in SolrSchema . But not found in DataConfig"); + if (log.isInfoEnabled()) { + log.info("{} is a required field in SolrSchema . 
But not found in DataConfig", sf.getName()); + } } } } @@ -114,7 +116,9 @@ public class DIHConfiguration { EntityField fld = entry.getValue(); SchemaField field = getSchemaField(fld.getName()); if (field == null && !isSpecialCommand(fld.getName())) { - log.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema"); + if (log.isInfoEnabled()) { + log.info("The field :{} present in DataConfig does not have a counterpart in Solr Schema", fld.getName()); + } } } } diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java index 277404451c1..ee5ec82f9e5 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java @@ -112,7 +112,7 @@ public abstract class AbstractSqlEntityProcessorTestCase extends for(Map.Entry entry : props.entrySet()) { sb.append(" > key=" + entry.getKey() + " / value=" + entry.getValue() + "\n"); } - log.debug(sb.toString()); + log.debug("{}", sb); } } @@ -465,9 +465,10 @@ public abstract class AbstractSqlEntityProcessorTestCase extends // One second in the future ensures a change time after the last import (DIH // uses second precision only) Timestamp theTime = new Timestamp(System.currentTimeMillis() + 1000); - log.debug("PEOPLE UPDATE USING TIMESTAMP: " - + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT) - .format(theTime)); + if (log.isDebugEnabled()) { + log.debug("PEOPLE UPDATE USING TIMESTAMP: {}" + , new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime)); + } try { conn = newConnection(); change = conn @@ -537,9 +538,10 @@ public abstract class AbstractSqlEntityProcessorTestCase extends // One second in the future ensures a change time after the last import (DIH // uses second precision only) Timestamp theTime = new Timestamp(System.currentTimeMillis() + 1000); - log.debug("COUNTRY UPDATE USING TIMESTAMP: " - + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT) - .format(theTime)); + if (log.isDebugEnabled()) { + log.debug("COUNTRY UPDATE USING TIMESTAMP: {}" + , new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime)); + } try { conn = newConnection(); change = conn @@ -720,9 +722,10 @@ public abstract class AbstractSqlEntityProcessorTestCase extends s.executeUpdate("create table countries(code varchar(3) not null primary key, country_name varchar(50), deleted char(1) default 'N', last_modified timestamp not null)"); s.executeUpdate("create table people(id int not null primary key, name varchar(50), country_code char(2), deleted char(1) default 'N', last_modified timestamp not null)"); s.executeUpdate("create table people_sports(id int not null primary key, person_id int, sport_name varchar(50), deleted char(1) default 'N', last_modified timestamp not null)"); - log.debug("INSERTING DB DATA USING TIMESTAMP: " - + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT) - .format(theTime)); + if (log.isDebugEnabled()) { + log.debug("INSERTING DB DATA USING TIMESTAMP: {}", + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime)); + } ps = conn .prepareStatement("insert into countries (code, country_name, last_modified) values (?,?,?)"); for 
(String[] country : countries) { diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java index 7b0e613ea75..8dd1b552e56 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java @@ -44,7 +44,7 @@ public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase { List testData = extractDataByKeyLookup(cache, fieldNames); compareData(data, testData); } catch (Exception e) { - log.warn("Exception thrown: " + e.toString()); + log.warn("Exception thrown: {}", e); Assert.fail(); } finally { try { @@ -64,7 +64,7 @@ public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase { List testData = extractDataInKeyOrder(cache, fieldNames); compareData(data, testData); } catch (Exception e) { - log.warn("Exception thrown: " + e.toString()); + log.warn("Exception thrown: {}", e); Assert.fail(); } finally { try { @@ -180,7 +180,7 @@ public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase { compareData(newControlData, testData); } catch (Exception e) { - log.warn("Exception thrown: " + e.toString()); + log.warn("Exception thrown: {}", e); Assert.fail(); } finally { try { diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java index 1f22a8c25b5..9708cdcff34 100644 --- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java +++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java @@ -49,7 +49,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC logPropertiesFile(); changeStuff(); int c = calculateDatabaseCalls(); - log.debug("testSingleEntity delta-import (" + c + " database calls expected)..."); + log.debug("testSingleEntity delta-import ({} database calls expected)...", c); singleEntity(c); validateChanges(); } @@ -76,7 +76,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC changeStuff(); int c = calculateDatabaseCalls(); simpleTransform(c); - log.debug("testWithSimpleTransformer delta-import (" + c + " database calls expected)..."); + log.debug("testWithSimpleTransformer delta-import ({} database calls expected)...", c); validateChanges(); } @Test @@ -86,7 +86,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC logPropertiesFile(); changeStuff(); int c = calculateDatabaseCalls(); - log.debug("testWithComplexTransformer delta-import (" + c + " database calls expected)..."); + log.debug("testWithComplexTransformer delta-import ({} database calls expected)...", c); complexTransform(c, personChanges.deletedKeys.length); validateChanges(); } @@ -94,7 +94,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC public void testChildEntities() throws Exception { log.debug("testChildEntities full-import..."); useParentDeltaQueryParam = random().nextBoolean(); - log.debug("using parent delta? " + useParentDeltaQueryParam); + log.debug("using parent delta? 
{}", useParentDeltaQueryParam); withChildEntities(false, true); logPropertiesFile(); changeStuff(); @@ -168,13 +168,13 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC sb.append(s).append(" "); } sb.append(" }"); - log.debug(sb.toString()); + log.debug("{}", sb); } } private void personChangesLog() { if(personChanges!=null) { - log.debug("person changes { " + personChanges.toString() + " } "); + log.debug("person changes [ {} ] ", personChanges); } } @Override diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java index cd5d4af576f..2ac1c2d22e5 100644 --- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java +++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java @@ -215,7 +215,7 @@ public class ExtractingDocumentLoader extends ContentStreamLoader { if(pwMapFile != null && pwMapFile.length() > 0) { InputStream is = req.getCore().getResourceLoader().openResource(pwMapFile); if(is != null) { - log.debug("Password file supplied: "+pwMapFile); + log.debug("Password file supplied: {}", pwMapFile); epp.parse(is); } } @@ -223,13 +223,13 @@ public class ExtractingDocumentLoader extends ContentStreamLoader { String resourcePassword = params.get(ExtractingParams.RESOURCE_PASSWORD); if(resourcePassword != null) { epp.setExplicitPassword(resourcePassword); - log.debug("Literal password supplied for file "+resourceName); + log.debug("Literal password supplied for file {}", resourceName); } parser.parse(inputStream, parsingHandler, metadata, context); } catch (TikaException e) { if(ignoreTikaException) log.warn(new StringBuilder("skip extracting text due to ").append(e.getLocalizedMessage()) - .append(". metadata=").append(metadata.toString()).toString()); + .append(". 
metadata=").append(metadata.toString()).toString()); // logOk else throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java index 41175a0987f..5ef5d3f1f3a 100644 --- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java +++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/RegexRulesPasswordProvider.java @@ -88,7 +88,7 @@ public class RegexRulesPasswordProvider implements PasswordProvider { continue; int sep = line.indexOf("="); if(sep <= 0) { - log.warn("Wrong format of password line "+linenum); + log.warn("Wrong format of password line {}", linenum); continue; } String pass = line.substring(sep+1).trim(); @@ -97,7 +97,7 @@ public class RegexRulesPasswordProvider implements PasswordProvider { Pattern pattern = Pattern.compile(regex); rules.put(pattern, pass); } catch(PatternSyntaxException pse) { - log.warn("Key of line "+linenum+" was not a valid regex pattern", pse); + log.warn("Key of line {} was not a valid regex pattern", linenum, pse); continue; } } diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java index 487d37c948c..d4fbe600b0a 100644 --- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java +++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LanguageIdentifierUpdateProcessor.java @@ -149,7 +149,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro if(keyVal.length == 2) { lcMap.put(keyVal[0], keyVal[1]); } else { - log.error("Unsupported format for langid.lcmap: "+mapping+". Skipping this mapping."); + log.error("Unsupported format for langid.lcmap: {}. Skipping this mapping.", mapping); } } } @@ -162,7 +162,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro if(keyVal.length == 2) { mapLcMap.put(keyVal[0], keyVal[1]); } else { - log.error("Unsupported format for langid.map.lcmap: "+mapping+". Skipping this mapping."); + log.error("Unsupported format for langid.map.lcmap: {}. Skipping this mapping.", mapping); } } } @@ -175,13 +175,15 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro if (maxFieldValueChars > maxTotalChars) { if (maxTotalChars == MAX_TOTAL_CHARS_DEFAULT) { // If the user specified only maxFieldValueChars, make maxTotalChars the same as it - log.warn(MAX_FIELD_VALUE_CHARS + " (" + maxFieldValueChars + ") is less than " + MAX_TOTAL_CHARS + " (" - + maxTotalChars + "). Setting " + MAX_TOTAL_CHARS + " to " + maxFieldValueChars + "."); + log.warn("{} ({}) is less than {} ({}). Setting {} to {}." + , MAX_FIELD_VALUE_CHARS, maxFieldValueChars, MAX_TOTAL_CHARS + , maxTotalChars, MAX_TOTAL_CHARS, maxFieldValueChars); maxTotalChars = maxFieldValueChars; } else { // If the user specified maxTotalChars, make maxFieldValueChars the same as it - log.warn(MAX_FIELD_VALUE_CHARS + " (" + maxFieldValueChars + ") is less than " + MAX_TOTAL_CHARS + " (" - + maxTotalChars + "). Setting " + MAX_FIELD_VALUE_CHARS + " to " + maxTotalChars + "."); + log.warn("{} ({}) is less than {} ({}). Setting {} to {}." 
+ , MAX_FIELD_VALUE_CHARS, maxFieldValueChars, MAX_TOTAL_CHARS + , maxTotalChars, MAX_FIELD_VALUE_CHARS, maxTotalChars ); maxFieldValueChars = maxTotalChars; } } @@ -219,10 +221,14 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro List languagelist = detectLanguage(doc); docLang = resolveLanguage(languagelist, fallbackLang); docLangs.add(docLang); - log.debug("Detected main document language from fields "+ Arrays.toString(inputFields) +": "+docLang); + if (log.isDebugEnabled()) { + log.debug("Detected main document language from fields {}: {}", Arrays.toString(inputFields), docLang); + } if(doc.containsKey(langField) && overwrite) { - log.debug("Overwritten old value "+doc.getFieldValue(langField)); + if (log.isDebugEnabled()) { + log.debug("Overwritten old value {}", doc.getFieldValue(langField)); + } } if(langField != null && langField.length() != 0) { doc.setField(langField, docLang); @@ -231,7 +237,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro // langField is set, we sanity check it against whitelist and fallback docLang = resolveLanguage(doc.getFieldValue(langField).toString(), fallbackLang); docLangs.add(docLang); - log.debug("Field "+langField+" already contained value "+docLang+", not overwriting."); + log.debug("Field {} already contained value {}, not overwriting.", langField, docLang); } if(enableMapping) { @@ -242,15 +248,17 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro List languagelist = detectLanguage(solrDocReader(doc, new String[]{fieldName})); fieldLang = resolveLanguage(languagelist, docLang); docLangs.add(fieldLang); - log.debug("Mapping field "+fieldName+" using individually detected language "+fieldLang); + log.debug("Mapping field {} using individually detected language {}", fieldName, fieldLang); } else { fieldLang = docLang; - log.debug("Mapping field "+fieldName+" using document global language "+fieldLang); + log.debug("Mapping field {} using document global language {}", fieldName, fieldLang); } String mappedOutputField = getMappedField(fieldName, fieldLang); if (mappedOutputField != null) { - log.debug("Mapping field {} to {}", doc.getFieldValue(docIdField), fieldLang); + if (log.isDebugEnabled()) { + log.debug("Mapping field {} to {}", doc.getFieldValue(docIdField), fieldLang); + } SolrInputField inField = doc.getField(fieldName); doc.setField(mappedOutputField, inField.getValue()); if(!mapKeepOrig) { @@ -282,12 +290,12 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro for(String field : fallbackFields) { if(doc.containsKey(field)) { lang = (String) doc.getFieldValue(field); - log.debug("Language fallback to field "+field); + log.debug("Language fallback to field {}", field); break; } } if(lang == null) { - log.debug("Language fallback to value "+fallbackValue); + log.debug("Language fallback to value {}", fallbackValue); lang = fallbackValue; } return lang; @@ -337,7 +345,9 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro DetectedLanguage lang = languages.get(0); String normalizedLang = normalizeLangCode(lang.getLangCode()); if(langWhitelist.isEmpty() || langWhitelist.contains(normalizedLang)) { - log.debug("Language detected {} with certainty {}", normalizedLang, lang.getCertainty()); + if (log.isDebugEnabled()) { + log.debug("Language detected {} with certainty {}", normalizedLang, lang.getCertainty()); + } if(lang.getCertainty() >= threshold) { langStr = normalizedLang; } else { 
@@ -345,7 +355,9 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro langStr = fallbackLang; } } else { - log.debug("Detected a language not in whitelist ({}), using fallback {}", lang.getLangCode(), fallbackLang); + if (log.isDebugEnabled()) { + log.debug("Detected a language not in whitelist ({}), using fallback {}", lang.getLangCode(), fallbackLang); + } langStr = fallbackLang; } } @@ -366,7 +378,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro protected String normalizeLangCode(String langCode) { if (lcMap.containsKey(langCode)) { String lc = lcMap.get(langCode); - log.debug("Doing langcode normalization mapping from "+langCode+" to "+lc); + log.debug("Doing langcode normalization mapping from {} to {}", langCode, lc); return lc; } return langCode; @@ -389,7 +401,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro log.warn("Unsuccessful field name mapping from {} to {}, field does not exist and enforceSchema=true; skipping mapping.", currentField, newFieldName); return null; } else { - log.debug("Doing mapping from "+currentField+" with language "+language+" to field "+newFieldName); + log.debug("Doing mapping from {} with language {} to field {}", currentField, language, newFieldName); } return newFieldName; } diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java index 5a43bd52e79..ecce415421a 100644 --- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java +++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/TikaLanguageIdentifierUpdateProcessor.java @@ -58,7 +58,10 @@ public class TikaLanguageIdentifierUpdateProcessor extends LanguageIdentifierUpd certainty = 0d; DetectedLanguage language = new DetectedLanguage(identifier.getLanguage(), certainty); languages.add(language); - log.debug("Language detected as "+language+" with a certainty of "+language.getCertainty()+" (Tika distance="+identifier.toString()+")"); + if (log.isDebugEnabled()) { + log.debug("Language detected as {} with a certainty of {} (Tika distance={})" + , language, language.getCertainty(), identifier); + } } else { log.debug("No input text to detect language from, returning empty list"); } diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java index 60cabccab55..2fc286aab54 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/store/rest/ManagedModelStore.java @@ -141,7 +141,9 @@ public class ManagedModelStore extends ManagedResource implements ManagedResourc public synchronized void addModel(LTRScoringModel ltrScoringModel) throws ModelException { try { - log.info("adding model {}", ltrScoringModel.getName()); + if (log.isInfoEnabled()) { + log.info("adding model {}", ltrScoringModel.getName()); + } store.addModel(ltrScoringModel); } catch (final ModelException e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java index 24c77cafc77..e921bcbb17d 100644 --- 
a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java @@ -248,8 +248,10 @@ public class TestLTRReRankingPipeline extends SolrTestCase { hits = new TopDocs(hits.totalHits, slice); hits = rescorer.rescore(searcher, hits, topN); for (int i = topN - 1, j = 0; i >= 0; i--, j++) { - log.info("doc {} in pos {}", searcher.doc(hits.scoreDocs[j].doc) - .get("id"), j); + if (log.isInfoEnabled()) { + log.info("doc {} in pos {}", searcher.doc(hits.scoreDocs[j].doc) + .get("id"), j); + } assertEquals(i, Integer.parseInt(searcher.doc(hits.scoreDocs[j].doc).get("id"))); diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java index 9d22cf4e9f8..8b26bcee145 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java @@ -159,13 +159,15 @@ public class TestRerankBase extends RestTestBase { } if (fstore.exists()) { - log.info("remove feature store config file in {}", - fstore.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("remove feature store config file in {}", fstore.getAbsolutePath()); + } Files.delete(fstore.toPath()); } if (mstore.exists()) { - log.info("remove model store config file in {}", - mstore.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("remove model store config file in {}", mstore.getAbsolutePath()); + } Files.delete(mstore.toPath()); } if (!solrconfig.equals("solrconfig.xml")) { diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java index cf282156d0f..04b0b75278f 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java @@ -208,7 +208,7 @@ public class SolrExporter { solrExporter.start(); log.info("Solr Prometheus Exporter is running"); } catch (IOException e) { - log.error("Failed to start Solr Prometheus Exporter: " + e.toString()); + log.error("Failed to start Solr Prometheus Exporter: ", e); } catch (ArgumentParserException e) { parser.handleError(e); } diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java index 74f2772a611..096c2484875 100644 --- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java +++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/scraper/SolrScraper.java @@ -106,7 +106,7 @@ public abstract class SolrScraper implements Closeable { queryResponse = client.request(queryRequest, query.getCollection().get()); } } catch (SolrServerException | IOException e) { - log.error("failed to request: " + queryRequest.getPath() + " " + e.getMessage()); + log.error("failed to request: {} {}", queryRequest.getPath(), e.getMessage()); } JsonNode jsonNode = OBJECT_MAPPER.readTree((String) queryResponse.get("response")); @@ -164,7 +164,7 @@ public abstract class SolrScraper implements Closeable { name, labelNames, labelValues, value)); } } catch (JsonQueryException e) { - log.error("Error apply JSON query={} to result", jsonQuery.toString(), e); + log.error("Error applying 
JSON query={} to result", jsonQuery, e); scrapeErrorTotal.inc(); } } diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java index a598ba291e8..17410398e96 100644 --- a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java +++ b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java @@ -103,11 +103,11 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar if (templateBaseDir != null && !templateBaseDir.isEmpty()) { fileResourceLoaderBaseDir = new File(templateBaseDir).getAbsoluteFile(); if (!fileResourceLoaderBaseDir.exists()) { // "*not* exists" condition! - log.warn(TEMPLATE_BASE_DIR + " specified does not exist: " + fileResourceLoaderBaseDir); + log.warn("{} specified does not exist: {}", TEMPLATE_BASE_DIR, fileResourceLoaderBaseDir); fileResourceLoaderBaseDir = null; } else { if (!fileResourceLoaderBaseDir.isDirectory()) { // "*not* a directory" condition - log.warn(TEMPLATE_BASE_DIR + " specified is not a directory: " + fileResourceLoaderBaseDir); + log.warn("{} specified is not a directory: {}", TEMPLATE_BASE_DIR, fileResourceLoaderBaseDir); fileResourceLoaderBaseDir = null; } } @@ -132,7 +132,7 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar try { velocityInitProps.load(new InputStreamReader(core.getResourceLoader().openResource(initPropertiesFileName), StandardCharsets.UTF_8)); } catch (IOException e) { - log.warn("Error loading " + PROPERTIES_FILE + " specified property file: " + initPropertiesFileName, e); + log.warn("Error loading {} specified property file: {}", PROPERTIES_FILE, initPropertiesFileName, e); } } } diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java index 8a3df781d7c..6ff70f9a564 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java @@ -326,7 +326,8 @@ public class Assign { public static List getNodesForNewReplicas(ClusterState clusterState, String collectionName, String shard, int nrtReplicas, int tlogReplicas, int pullReplicas, Object createNodeSet, SolrCloudManager cloudManager) throws IOException, InterruptedException, AssignmentException { - log.debug("getNodesForNewReplicas() shard: {} , nrtReplicas : {} , tlogReplicas: {} , pullReplicas: {} , createNodeSet {}", shard, nrtReplicas, tlogReplicas, pullReplicas, createNodeSet); + log.debug("getNodesForNewReplicas() shard: {} , nrtReplicas : {} , tlogReplicas: {} , pullReplicas: {} , createNodeSet {}" + , shard, nrtReplicas, tlogReplicas, pullReplicas, createNodeSet); DocCollection coll = clusterState.getCollection(collectionName); Integer maxShardsPerNode = coll.getMaxShardsPerNode() == -1 ? 
Integer.MAX_VALUE : coll.getMaxShardsPerNode(); List createNodeList = null; diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index 30c3f83a10b..f332837d92c 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -896,9 +896,9 @@ public class CoreContainer { private void warnUsersOfInsecureSettings() { if (authenticationPlugin == null || authorizationPlugin == null) { - log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as {}{}" - , "you make it. Consider configuring authentication/authorization before exposing Solr to users internal or " - , "external. See https://s.apache.org/solrsecurity for more info", + log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as " + + "you make it. Consider configuring authentication/authorization before exposing Solr to users internal or " + + "external. See https://s.apache.org/solrsecurity for more info", (authenticationPlugin != null) ? "enabled" : "disabled", (authorizationPlugin != null) ? "enabled" : "disabled"); } diff --git a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java index f49604f683a..56ad7312559 100644 --- a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java +++ b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java @@ -233,8 +233,7 @@ public class FileUtil { } final boolean ex = f.exists(); if (doLog && ex) { - LOG.warn("Failed to delete file or dir [" - + f.getAbsolutePath() + "]: it still exists."); + LOG.warn("Failed to delete file or dir [{}]: it still exists.", f.getAbsolutePath()); } return !ex; } @@ -747,7 +746,9 @@ public class FileUtil { new IOUtils.NullOutputStream()); } } catch (IOException e) { - LOG.debug(e.getMessage()); + if (LOG.isDebugEnabled()) { + LOG.debug(e.getMessage()); + } } }); Future error = executor.submit(() -> { @@ -770,7 +771,9 @@ public class FileUtil { new IOUtils.NullOutputStream()); } } catch (IOException e) { - LOG.debug(e.getMessage()); + if (LOG.isDebugEnabled()) { + LOG.debug(e.getMessage()); + } } }); @@ -1042,8 +1045,7 @@ public class FileUtil { public static int symLink(String target, String linkname) throws IOException{ if (target == null || linkname == null) { - LOG.warn("Can not create a symLink with a target = " + target - + " and link =" + linkname); + LOG.warn("Can not create a symLink with a target = {} and link = {}", target, linkname); return 1; } @@ -1080,14 +1082,13 @@ public class FileUtil { + "administrators and all non-administrators from creating symbolic links. " + "This behavior can be changed in the Local Security Policy management console"); } else if (returnVal != 0) { - LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed " - + returnVal + " with: " + ec.getMessage()); + LOG.warn("Command '{}' failed {} with: {}", StringUtils.join(" ", cmd) + , returnVal, ec.getMessage()); } return returnVal; } catch (IOException e) { if (LOG.isDebugEnabled()) { - LOG.debug("Error while create symlink " + linkname + " to " + target - + "." + " Exception: " + StringUtils.stringifyException(e)); + LOG.debug("Error while creating symlink {} to {}. 
Exception: {}", linkname, target, StringUtils.stringifyException(e)); } throw e; } @@ -1126,8 +1127,7 @@ public class FileUtil { shExec.execute(); }catch(IOException e) { if(LOG.isDebugEnabled()) { - LOG.debug("Error while changing permission : " + filename - +" Exception: " + StringUtils.stringifyException(e)); + LOG.debug("Error while changing permission : {} Exception: {}", filename, StringUtils.stringifyException(e)); } } return shExec.getExitCode(); @@ -1501,7 +1501,7 @@ public class FileUtil { // then this is acceptable. If it returns false due to some other I/O // error, then this method will fail later with an IOException while saving // the jar. - LOG.debug("mkdirs false for " + workingDir + ", execution will continue"); + LOG.debug("mkdirs false for {}, execution will continue", workingDir); } StringBuilder unexpandedWildcardClasspath = new StringBuilder(); diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java index bfc18a3073d..9cba3fb1ff3 100644 --- a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java +++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java @@ -418,7 +418,7 @@ public class BlockPoolSlice { try { fileIoProvider.mkdirsWithExistsCheck(volume, targetDir); } catch(IOException ioe) { - LOG.warn("Failed to mkdirs " + targetDir); + LOG.warn("Failed to mkdirs {}", targetDir); continue; } @@ -426,8 +426,7 @@ public class BlockPoolSlice { try { fileIoProvider.rename(volume, metaFile, targetMetaFile); } catch (IOException e) { - LOG.warn("Failed to move meta file from " - + metaFile + " to " + targetMetaFile, e); + LOG.warn("Failed to move meta file from {} to {}", metaFile, targetMetaFile, e); continue; } @@ -435,8 +434,7 @@ public class BlockPoolSlice { try { fileIoProvider.rename(volume, blockFile, targetBlockFile); } catch (IOException e) { - LOG.warn("Failed to move block file from " - + blockFile + " to " + targetBlockFile, e); + LOG.warn("Failed to move block file from {} to {}", blockFile, targetBlockFile, e); continue; } @@ -444,7 +442,7 @@ public class BlockPoolSlice { ++numRecovered; } else { // Failure should be rare. - LOG.warn("Failed to move " + blockFile + " to " + targetDir); + LOG.warn("Failed to move {} to {}", blockFile, targetDir); } } } @@ -655,8 +653,7 @@ public class BlockPoolSlice { replicaToDelete = (replicaToKeep == replica1) ? replica2 : replica1; if (LOG.isDebugEnabled()) { - LOG.debug("resolveDuplicateReplicas decide to keep " + replicaToKeep - + ". Will try to delete " + replicaToDelete); + LOG.debug("resolveDuplicateReplicas decide to keep {}. Will try to delete {}", replicaToKeep, replicaToDelete); } return replicaToDelete; } @@ -664,10 +661,10 @@ public class BlockPoolSlice { private void deleteReplica(final ReplicaInfo replicaToDelete) { // Delete the files on disk. Failure here is okay. 
if (!replicaToDelete.deleteBlockData()) { - LOG.warn("Failed to delete block file for replica " + replicaToDelete); + LOG.warn("Failed to delete block file for replica {}", replicaToDelete); } if (!replicaToDelete.deleteMetadata()) { - LOG.warn("Failed to delete meta file for replica " + replicaToDelete); + LOG.warn("Failed to delete meta file for replica {}", replicaToDelete); } } @@ -765,18 +762,21 @@ public class BlockPoolSlice { File replicaFile = new File(currentDir, REPLICA_CACHE_FILE); // Check whether the file exists or not. if (!replicaFile.exists()) { - LOG.info("Replica Cache file: "+ replicaFile.getPath() + - " doesn't exist "); + if (LOG.isInfoEnabled()) { + LOG.info("Replica Cache file: {} doesn't exist", replicaFile.getPath()); + } return false; } long fileLastModifiedTime = replicaFile.lastModified(); if (System.currentTimeMillis() > fileLastModifiedTime + replicaCacheExpiry) { - LOG.info("Replica Cache file: " + replicaFile.getPath() + - " has gone stale"); + if (LOG.isInfoEnabled()) { + LOG.info("Replica Cache file: {} has gone stale", replicaFile.getPath()); + } // Just to make findbugs happy if (!replicaFile.delete()) { - LOG.info("Replica Cache file: " + replicaFile.getPath() + - " cannot be deleted"); + if (LOG.isInfoEnabled()) { + LOG.info("Replica Cache file: {} cannot be deleted", replicaFile.getPath()); + } } return false; } @@ -814,14 +814,16 @@ public class BlockPoolSlice { iter.remove(); volumeMap.add(bpid, info); } - LOG.info("Successfully read replica from cache file : " - + replicaFile.getPath()); + if (LOG.isInfoEnabled()) { + LOG.info("Successfully read replica from cache file : {}", replicaFile.getPath()); + } return true; } catch (Exception e) { // Any exception we need to revert back to read from disk // Log the error and return false - LOG.info("Exception occurred while reading the replicas cache file: " - + replicaFile.getPath(), e ); + if (LOG.isInfoEnabled()) { + LOG.info("Exception occurred while reading the replicas cache file: {}", replicaFile.getPath(), e); + } return false; } finally { @@ -829,8 +831,9 @@ public class BlockPoolSlice { IOUtils.closeStream(inputStream); if (!fileIoProvider.delete(volume, replicaFile)) { - LOG.info("Failed to delete replica cache file: " + - replicaFile.getPath()); + if (LOG.isInfoEnabled()) { + LOG.info("Failed to delete replica cache file: {}", replicaFile.getPath()); + } } } } @@ -922,8 +925,7 @@ public class BlockPoolSlice { addToReplicasMap(volumeMap, dir, lazyWriteReplicaMap, isFinalized, exceptions, subTaskQueue); } catch (IOException e) { - LOG.warn("Caught exception while adding replicas from " + volume - + " in subtask. Will throw later.", e); + LOG.warn("Caught exception while adding replicas from {} in subtask. 
Will throw later.", volume, e); exceptions.add(e); } } diff --git a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java index cd3c4a324eb..0767d4f8f85 100644 --- a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java +++ b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java @@ -523,7 +523,7 @@ public final class HttpServer2 implements FilterContainer { if(null != excludeCiphers && !excludeCiphers.isEmpty()) { sslContextFactory.setExcludeCipherSuites( StringUtils.getTrimmedStrings(excludeCiphers)); - LOG.info("Excluded Cipher List:" + excludeCiphers); + LOG.info("Excluded Cipher List:{}", excludeCiphers); } conn.addFirstConnectionFactory(new SslConnectionFactory(sslContextFactory, @@ -610,7 +610,7 @@ public final class HttpServer2 implements FilterContainer { if (pathSpecs != null) { for (String path : pathSpecs) { - LOG.info("adding path spec: " + path); + LOG.info("adding path spec: {}", path); addFilterPathMapping(path, webAppContext); } } @@ -782,8 +782,8 @@ public final class HttpServer2 implements FilterContainer { */ public void addJerseyResourcePackage(final String packageName, final String pathSpec) { - LOG.info("addJerseyResourcePackage: packageName=" + packageName - + ", pathSpec=" + pathSpec); + LOG.info("addJerseyResourcePackage: packageName={}, pathSpec={}" + , packageName, pathSpec); final ServletHolder sh = new ServletHolder(ServletContainer.class); sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass", "com.sun.jersey.api.core.PackagesResourceConfig"); @@ -845,9 +845,10 @@ public final class HttpServer2 implements FilterContainer { for (int i = 0; i < servletMappings.length; i++) { if (servletMappings[i].containsPathSpec(pathSpec)) { if (LOG.isDebugEnabled()) { - LOG.debug("Found existing " + servletMappings[i].getServletName() + - " servlet at path " + pathSpec + "; will replace mapping" + - " with " + holder.getName() + " servlet"); + LOG.debug("Found existing {} servlet at path {}; will replace mapping with {} servlet" + , servletMappings[i].getServletName() + , pathSpec + , holder.getName()); } ServletMapping[] newServletMappings = ArrayUtil.removeFromArray(servletMappings, servletMappings[i]); @@ -859,7 +860,7 @@ webAppContext.addServlet(holder, pathSpec); if(requireAuth && UserGroupInformation.isSecurityEnabled()) { - LOG.info("Adding Kerberos (SPNEGO) filter to " + name); + LOG.info("Adding Kerberos (SPNEGO) filter to {}", name); ServletHandler handler = webAppContext.getServletHandler(); FilterMapping fmap = new FilterMapping(); fmap.setPathSpec(pathSpec); @@ -894,9 +895,8 @@ for (int i = 0; i < servletMappings.length; i++) { if (servletMappings[i].containsPathSpec(pathSpec)) { if (LOG.isDebugEnabled()) { - LOG.debug("Found existing " + servletMappings[i].getServletName() + - " servlet at path " + pathSpec + "; will replace mapping" + - " with " + sh.getName() + " servlet"); + LOG.debug("Found existing {} servlet at path {}; will replace mapping with {} servlet" + , servletMappings[i].getServletName(), pathSpec, sh.getName()); } ServletMapping[] newServletMappings = ArrayUtil.removeFromArray(servletMappings, servletMappings[i]); @@ -936,9 +936,10 @@ public final class HttpServer2 implements FilterContainer { final String[] USER_FACING_URLS = { "*.html", "*.jsp" }; FilterMapping fmap = getFilterMapping(name, USER_FACING_URLS); 
defineFilter(webAppContext, filterHolder, fmap); - LOG.info( - "Added filter " + name + " (class=" + classname + ") to context " - + webAppContext.getDisplayName()); + if (LOG.isInfoEnabled()) { + LOG.info("Added filter {} (class={}) to context {}", name, classname + , webAppContext.getDisplayName()); + } final String[] ALL_URLS = { "/*" }; fmap = getFilterMapping(name, ALL_URLS); for (Map.Entry e @@ -946,8 +947,10 @@ if (e.getValue()) { ServletContextHandler ctx = e.getKey(); defineFilter(ctx, filterHolder, fmap); - LOG.info("Added filter " + name + " (class=" + classname - + ") to context " + ctx.getDisplayName()); + if (LOG.isInfoEnabled()) { + LOG.info("Added filter {} (class={}) to context {}" + , name, classname, ctx.getDisplayName()); + } } } filterNames.add(name); @@ -963,7 +966,7 @@ for (ServletContextHandler ctx : defaultContexts.keySet()) { defineFilter(ctx, filterHolder, fmap); } - LOG.info("Added global filter '" + name + "' (class=" + classname + ")"); + LOG.info("Added global filter '{}' (class={})", name, classname); } /** @@ -1179,7 +1182,9 @@ public final class HttpServer2 implements FilterContainer { // failed to open w/o issuing a close first, even if the port is changed listener.close(); listener.open(); - LOG.info("Jetty bound to port " + listener.getLocalPort()); + if (LOG.isInfoEnabled()) { + LOG.info("Jetty bound to port {}", listener.getLocalPort()); + } } /** @@ -1286,9 +1291,7 @@ public final class HttpServer2 implements FilterContainer { try { c.close(); } catch (Exception e) { - LOG.error( - "Error while stopping listener for webapp" - + webAppContext.getDisplayName(), e); + LOG.error("Error while stopping listener for webapp {}", webAppContext.getDisplayName(), e); exception = addMultiException(exception, e); } } @@ -1300,16 +1303,15 @@ webAppContext.clearAttributes(); webAppContext.stop(); } catch (Exception e) { - LOG.error("Error while stopping web app context for webapp " - + webAppContext.getDisplayName(), e); + LOG.error("Error while stopping web app context for webapp {}", webAppContext.getDisplayName(), e); exception = addMultiException(exception, e); } try { webServer.stop(); } catch (Exception e) { - LOG.error("Error while stopping web server for webapp " - + webAppContext.getDisplayName(), e); + LOG.error("Error while stopping web server for webapp {}" + , webAppContext.getDisplayName(), e); exception = addMultiException(exception, e); } @@ -1415,8 +1417,7 @@ response.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthenticated users are not " + "authorized to access this page."); - LOG.warn("User " + remoteUser + " is unauthorized to access the page " - + request.getRequestURI() + "."); + LOG.warn("User {} is unauthorized to access the page {}.", remoteUser, request.getRequestURI()); return false; } diff --git a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java index 35ef3c60245..45c75d75749 100644 --- a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java +++ b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java @@ -918,12 +918,8 @@ public class TestGroupingSearch extends SolrTestCaseJ4 { Object realResponse = Utils.fromJSONString(strResponse); String err = JSONTestUtil.matchObj("/grouped/" + groupField, realResponse, 
modelResponse); if (err != null) { - log.error("GROUPING MISMATCH (" + queryIter + "): " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ Utils.toJSONString(modelResponse) - + "\n\tsorted_model="+ sortedGroups - ); + log.error("GROUPING MISMATCH ({}): {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tsorted_model={}" + , queryIter, err, req, strResponse, Utils.toJSONString(modelResponse), sortedGroups); // re-execute the request... good for putting a breakpoint here for debugging String rsp = h.query(req); @@ -934,12 +930,8 @@ // assert post / pre grouping facets err = JSONTestUtil.matchObj("/facet_counts/facet_fields/"+FOO_STRING_FIELD, realResponse, expectedFacetResponse); if (err != null) { - log.error("GROUPING MISMATCH (" + queryIter + "): " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ Utils.toJSONString(expectedFacetResponse) - ); - + log.error("GROUPING MISMATCH ({}): {}\n\trequest={}\n\tresult={}\n\texpected={}" + , queryIter, err, req, strResponse, Utils.toJSONString(expectedFacetResponse)); // re-execute the request... good for putting a breakpoint here for debugging h.query(req); fail(err); diff --git a/solr/core/src/test/org/apache/solr/TestJoin.java b/solr/core/src/test/org/apache/solr/TestJoin.java index 1e0a676dafc..6a46b7e7d6c 100644 --- a/solr/core/src/test/org/apache/solr/TestJoin.java +++ b/solr/core/src/test/org/apache/solr/TestJoin.java @@ -263,11 +263,8 @@ public class TestJoin extends SolrTestCaseJ4 { Object realResponse = Utils.fromJSONString(strResponse); String err = JSONTestUtil.matchObj("/response", realResponse, resultSet); if (err != null) { - log.error("JOIN MISMATCH: " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ Utils.toJSONString(resultSet) - + "\n\tmodel="+ model + log.error("JOIN MISMATCH: {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tmodel={}" + , err, req, strResponse, Utils.toJSONString(resultSet), model ); // re-execute the request... 
good for putting a breakpoint here for debugging diff --git a/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java b/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java index aaeab54d5fc..a8a86d3f283 100644 --- a/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java +++ b/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java @@ -286,11 +286,8 @@ public class TestRandomDVFaceting extends SolrTestCaseJ4 { for (int i=1; i 1; // No need to clear counter more than one time if (random().nextBoolean() && i > 5 && !clearedCounter) { diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java index 6fc66429b7e..26b0c36a1d8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java @@ -291,8 +291,10 @@ public class ChaosMonkeyNothingIsSafeWithPullReplicasTest extends AbstractFullDi .getResults().getNumFound(); assertTrue("Found " + ctrlDocs + " control docs", cloudClientDocs > 0); - - log.info("collection state: " + printClusterStateInfo(DEFAULT_COLLECTION)); + + if (log.isInfoEnabled()) { + log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); + } if (VERBOSE) System.out.println("control docs:" + controlClient.query(new SolrQuery("*:*")).getResults() diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java index 8f9abffc1f8..e1e9a8705ee 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java @@ -203,10 +203,11 @@ public class ChaosMonkeySafeLeaderWithPullReplicasTest extends AbstractFullDistr Thread.sleep(3000); waitForThingsToLevelOut(3, TimeUnit.MINUTES); - - log.info("control docs:" + controlClient.query(new SolrQuery("*:*")).getResults().getNumFound() + "\n\n"); - - log.info("collection state: " + printClusterStateInfo(DEFAULT_COLLECTION)); + + if (log.isInfoEnabled()) { + log.info("control docs:{}\n\n", controlClient.query(new SolrQuery("*:*")).getResults().getNumFound()); + log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // logOk + } waitForReplicationFromReplicas(DEFAULT_COLLECTION, cloudClient.getZkStateReader(), new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME)); // waitForAllWarmingSearchers(); diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java index 3892bc6f0a7..050ac34608a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java @@ -205,7 +205,9 @@ public class CollectionPropsTest extends SolrCloudTestCase { // Trigger a value change event log.info("setting value2"); collectionProps.setCollectionProperty(collectionName, "property", "value2"); - log.info("(value2) waitForTrigger=={}", watcher.waitForTrigger()); + if (log.isInfoEnabled()) { + log.info("(value2) waitForTrigger=={}", watcher.waitForTrigger()); + } assertEquals("value2", watcher.getProps().get("property")); // Delete the properties znode diff --git 
a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java index 33a1a55955d..0c945e64352 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java @@ -81,7 +81,9 @@ public class DeleteInactiveReplicaTest extends SolrCloudTestCase { return r == null || r.getState() != Replica.State.ACTIVE; }); - log.info("Removing replica {}/{} ", shard.getName(), replica.getName()); + if (log.isInfoEnabled()) { + log.info("Removing replica {}/{} ", shard.getName(), replica.getName()); + } CollectionAdminRequest.deleteReplica(collectionName, shard.getName(), replica.getName()) .process(cluster.getSolrClient()); waitForState("Expected deleted replica " + replica.getName() + " to be removed from cluster state", collectionName, (n, c) -> { diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java index a6ff54bd899..5fd339e5915 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java @@ -75,7 +75,7 @@ public class DeleteNodeTest extends SolrCloudTestCase { // check what replicas are on the node, and whether the call should fail boolean shouldFail = false; DocCollection docColl = state.getCollection(coll); - log.info("#### DocCollection: " + docColl); + log.info("#### DocCollection: {}", docColl); List replicas = docColl.getReplicas(node2bdecommissioned); if (replicas != null) { for (Replica replica : replicas) { @@ -106,7 +106,9 @@ public class DeleteNodeTest extends SolrCloudTestCase { } Thread.sleep(50); } - log.info("####### DocCollection after: " + cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + if (log.isInfoEnabled()) { + log.info("####### DocCollection after: {}", cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + } if (shouldFail) { assertTrue(String.valueOf(rsp), rsp.getRequestStatus() == RequestStatusState.FAILED); } else { diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java index 253f2ba4dcb..df361124a4f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java @@ -467,7 +467,9 @@ public class DeleteReplicaTest extends SolrCloudTestCase { try { cluster.getSolrClient().waitForState(collectionName, 20, TimeUnit.SECONDS, (liveNodes, collectionState) -> collectionState.getReplicas().size() == 1); } catch (TimeoutException e) { - log.info("Timeout wait for state {}", getCollectionState(collectionName)); + if (log.isInfoEnabled()) { + log.info("Timeout wait for state {}", getCollectionState(collectionName)); + } throw e; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java index bbd6eb063ee..805e013b8e2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java @@ -123,7 +123,7 @@ public class DistribJoinFromCollectionTest extends SolrCloudTestCase{ @AfterClass public static void shutdown() { - log.info("DistribJoinFromCollectionTest logic complete ... 
deleting the " + toColl + " and " + fromColl + " collections"); + log.info("DistribJoinFromCollectionTest logic complete ... deleting the {} and {} collections", toColl, fromColl); // try to clean up for (String c : new String[]{ toColl, fromColl }) { @@ -132,7 +132,7 @@ public class DistribJoinFromCollectionTest extends SolrCloudTestCase{ req.process(cluster.getSolrClient()); } catch (Exception e) { // don't fail the test - log.warn("Could not delete collection {} after test completed due to: " + e, c); + log.warn("Could not delete collection {} after test completed due to:", c, e); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java index 0394152ce25..a4b1b1281d1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java @@ -123,7 +123,7 @@ public class DistributedVersionInfoTest extends SolrCloudTestCase { req.setParams(params); req.add(doc); - log.info("Sending doc with out-of-date version ("+(maxOnReplica -1)+") document directly to replica"); + log.info("Sending doc with out-of-date version ({}) document directly to replica", maxOnReplica -1); client.request(req); client.commit(); @@ -244,7 +244,9 @@ public class DistributedVersionInfoTest extends SolrCloudTestCase { cluster.getSolrClient().commit(COLLECTION); - log.info("Total of "+deletedDocs.size()+" docs deleted"); + if (log.isInfoEnabled()) { + log.info("Total of {} docs deleted", deletedDocs.size()); + } maxOnLeader = getMaxVersionFromIndex(leader); maxOnReplica = getMaxVersionFromIndex(replica); @@ -363,7 +365,7 @@ public class DistributedVersionInfoTest extends SolrCloudTestCase { Thread.sleep(1000); // send reload command for the collection - log.info("Sending RELOAD command for " + testCollectionName); + log.info("Sending RELOAD command for {}", testCollectionName); CollectionAdminRequest.reloadCollection(testCollectionName) .process(client); Thread.sleep(2000); // reload can take a short while diff --git a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java index 7509e626e7d..84b3622d435 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java @@ -89,7 +89,9 @@ public class ForceLeaderTest extends HttpPartitionTest { JettySolrRunner notLeader0 = getJettyOnPort(getReplicaPort(notLeaders.get(0))); ZkController zkController = notLeader0.getCoreContainer().getZkController(); - log.info("Before put non leaders into lower term: " + printClusterStateInfo()); + if (log.isInfoEnabled()) { + log.info("Before put non leaders into lower term: {}", printClusterStateInfo()); + } putNonLeadersIntoLowerTerm(testCollectionName, SHARD1, zkController, leader, notLeaders, cloudClient); for (Replica replica : notLeaders) { @@ -109,7 +111,9 @@ public class ForceLeaderTest extends HttpPartitionTest { } } assertEquals(2, numReplicasOnLiveNodes); - log.info("Before forcing leader: " + printClusterStateInfo()); + if (log.isInfoEnabled()) { + log.info("Before forcing leader: {}", printClusterStateInfo()); + } // Assert there is no leader yet assertNull("Expected no leader right now. 
State: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1), clusterState.getCollection(testCollectionName).getSlice(SHARD1).getLeader()); @@ -124,7 +128,9 @@ public class ForceLeaderTest extends HttpPartitionTest { cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); clusterState = cloudClient.getZkStateReader().getClusterState(); - log.info("After forcing leader: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1)); + if (log.isInfoEnabled()) { + log.info("After forcing leader: {}", clusterState.getCollection(testCollectionName).getSlice(SHARD1)); + } // we have a leader Replica newLeader = clusterState.getCollectionOrNull(testCollectionName).getSlice(SHARD1).getLeader(); assertNotNull(newLeader); @@ -195,7 +201,9 @@ public class ForceLeaderTest extends HttpPartitionTest { } // Kill the leader - log.info("Killing leader for shard1 of " + collectionName + " on node " + leader.getNodeName() + ""); + if (log.isInfoEnabled()) { + log.info("Killing leader for shard1 of {} on node {}", collectionName, leader.getNodeName()); + } leaderJetty.stop(); // Wait for a steady state, till the shard is leaderless @@ -243,14 +251,16 @@ public class ForceLeaderTest extends HttpPartitionTest { waitForRecoveriesToFinish(collection, cloudClient.getZkStateReader(), true); cloudClient.getZkStateReader().forceUpdateCollection(collection); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - log.info("After bringing back leader: " + clusterState.getCollection(collection).getSlice(SHARD1)); + if (log.isInfoEnabled()) { + log.info("After bringing back leader: {}", clusterState.getCollection(collection).getSlice(SHARD1)); + } int numActiveReplicas = getNumberOfActiveReplicas(clusterState, collection, SHARD1); assertEquals(1+notLeaders.size(), numActiveReplicas); - log.info("Sending doc "+docid+"..."); + log.info("Sending doc {}...", docid); sendDoc(docid); log.info("Committing..."); cloudClient.commit(); - log.info("Doc "+docid+" sent and commit issued"); + log.info("Doc {} sent and commit issued", docid); assertDocsExistInAllReplicas(notLeaders, collection, docid, docid); assertDocsExistInAllReplicas(Collections.singletonList(leader), collection, docid, docid); } diff --git a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java index 4bd4b52fa17..4b1d7d43b11 100644 --- a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java @@ -480,7 +480,9 @@ public class FullSolrCloudDistribCmdsTest extends SolrCloudTestCase { for (Replica replica : slice) { try (HttpSolrClient replicaClient = getHttpSolrClient(replica.getCoreUrl())) { final SolrDocumentList replicaResults = replicaClient.query(perReplicaParams).getResults(); - log.debug("Shard {}: Replica ({}) results: {}", shardName, replica.getCoreName(), replicaResults); + if (log.isDebugEnabled()) { + log.debug("Shard {}: Replica ({}) results: {}", shardName, replica.getCoreName(), replicaResults); + } assertEquals("inconsistency w/leader: shard=" + shardName + "core=" + replica.getCoreName(), Collections.emptySet(), CloudInspectUtil.showDiff(leaderResults, replicaResults, diff --git a/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java b/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java index 1c5d470beb2..a3a81c53cb6 100644 --- 
a/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java +++ b/solr/core/src/test/org/apache/solr/cloud/FullThrottleStoppableIndexingThread.java @@ -107,7 +107,7 @@ class FullThrottleStoppableIndexingThread extends StoppableIndexingThread { } - log.info("FT added docs:" + numAdds + " with " + fails + " fails" + " deletes:" + numDeletes); + log.info("FT added docs:{} with {} fails deletes:{}", numAdds, fails, numDeletes); } private void changeUrlOnError(Exception e) { diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java index 8df61759e84..b5d3638ae5b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java @@ -83,11 +83,15 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest { + printClusterStateInfo(), notLeaders.size() == 1); - log.info("All replicas active for "+testCollectionName); + if (log.isInfoEnabled()) { + log.info("All replicas active for {}", testCollectionName); + } // let's put the leader in its own partition, no replicas can contact it now Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); - log.info("Creating partition to leader at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Creating partition to leader at {}", leader.getCoreUrl()); + } SocketProxy leaderProxy = getProxyForReplica(leader); leaderProxy.close(); @@ -101,7 +105,9 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest { leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); - log.info("Healing partitioned replica at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Healing partitioned replica at {}", leader.getCoreUrl()); + } leaderProxy.reopen(); Thread.sleep(sleepMsBeforeHealPartition); @@ -126,11 +132,13 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest { + printClusterStateInfo(), notLeaders.size() == 2); - log.info("All replicas active for "+testCollectionName); + log.info("All replicas active for {}", testCollectionName); // let's put the leader in its own partition, no replicas can contact it now Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); - log.info("Creating partition to leader at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Creating partition to leader at {}", leader.getCoreUrl()); + } SocketProxy leaderProxy = getProxyForReplica(leader); leaderProxy.close(); @@ -143,7 +151,9 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest { leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); - log.info("Healing partitioned replica at "+leader.getCoreUrl()); + if (log.isInfoEnabled()) { + log.info("Healing partitioned replica at {}", leader.getCoreUrl()); + } leaderProxy.reopen(); Thread.sleep(sleepMsBeforeHealPartition); @@ -165,21 +175,22 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest { protected void sendCommitWithRetry(Replica replica) throws Exception { String replicaCoreUrl = replica.getCoreUrl(); - log.info("Sending commit request to: "+replicaCoreUrl); + log.info("Sending commit request to: {}", 
replicaCoreUrl); final RTimer timer = new RTimer(); try (HttpSolrClient client = getHttpSolrClient(replicaCoreUrl)) { try { client.commit(); - log.info("Sent commit request to {} OK, took {}ms", replicaCoreUrl, timer.getTime()); + if (log.isInfoEnabled()) { + log.info("Sent commit request to {} OK, took {}ms", replicaCoreUrl, timer.getTime()); + } } catch (Exception exc) { Throwable rootCause = SolrException.getRootCause(exc); if (rootCause instanceof NoHttpResponseException) { - log.warn("No HTTP response from sending commit request to "+replicaCoreUrl+ - "; will re-try after waiting 3 seconds"); + log.warn("No HTTP response from sending commit request to {}; will re-try after waiting 3 seconds", replicaCoreUrl); Thread.sleep(3000); client.commit(); - log.info("Second attempt at sending commit to "+replicaCoreUrl+" succeeded."); + log.info("Second attempt at sending commit to {} succeeded", replicaCoreUrl); } else { throw exc; } diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java index 96c10e4da5f..e461ef9cd5c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java @@ -260,7 +260,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { maxVersionBefore = ulog.getCurrentMaxVersion(); } assertNotNull("max version bucket seed not set for core " + coreName, maxVersionBefore); - log.info("Looked up max version bucket seed "+maxVersionBefore+" for core "+coreName); + log.info("Looked up max version bucket seed {} for core {}", maxVersionBefore, coreName); // now up the stakes and do more docs int numDocs = TEST_NIGHTLY ? 1000 : 105; @@ -296,15 +296,15 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { try (SolrCore core = coreContainer.getCore(coreName)) { assertNotNull("Core '" + coreName + "' not found for replica: " + notLeader.getName(), core); Long currentMaxVersion = core.getUpdateHandler().getUpdateLog().getCurrentMaxVersion(); - log.info("After recovery, looked up NEW max version bucket seed " + currentMaxVersion + - " for core " + coreName + ", was: " + maxVersionBefore); + log.info("After recovery, looked up NEW max version bucket seed {} for core {}, was: {}" + , currentMaxVersion, coreName, maxVersionBefore); assertTrue("max version bucket seed not updated after recovery!", currentMaxVersion > maxVersionBefore); } // verify all docs received assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, numDocs + 3); - log.info("testRf2 succeeded ... deleting the "+testCollectionName+" collection"); + log.info("testRf2 succeeded ... deleting the {} collection", testCollectionName); // try to clean up attemptCollectionDelete(cloudClient, testCollectionName); @@ -374,7 +374,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 4); - log.info("testRf3 succeeded ... deleting the "+testCollectionName+" collection"); + log.info("testRf3 succeeded ... 
deleting the {} collection", testCollectionName); // try to clean up attemptCollectionDelete(cloudClient, testCollectionName); @@ -433,7 +433,9 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { // TODO: This test logic seems to be timing dependent and fails on Jenkins // need to come up with a better approach - log.info("Sending doc 2 to old leader "+leader.getName()); + if (log.isInfoEnabled()) { + log.info("Sending doc 2 to old leader {}", leader.getName()); + } try ( HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName)) { leaderSolr.add(doc); @@ -458,7 +460,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 30); assertDocsExistInAllReplicas(participatingReplicas, testCollectionName, 1, 2); - log.info("testLeaderZkSessionLoss succeeded ... deleting the "+testCollectionName+" collection"); + log.info("testLeaderZkSessionLoss succeeded ... deleting the {} collection", testCollectionName); // try to clean up attemptCollectionDelete(cloudClient, testCollectionName); @@ -609,7 +611,9 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { final Replica.State state = replica.getState(); if (state != Replica.State.ACTIVE) { - log.info("Replica " + replica.getName() + " is currently " + state); + if (log.isInfoEnabled()) { + log.info("Replica {} is currently {}", replica.getName(), state); + } allReplicasUp = false; } } @@ -626,7 +630,9 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { fail("Didn't see replicas "+ replicasToCheck + " come up within " + maxWaitMs + " ms! ClusterState: " + printClusterStateInfo(testCollectionName)); - log.info("Took {} ms to see replicas [{}] become active.", timer.getTime(), replicasToCheck); + if (log.isInfoEnabled()) { + log.info("Took {} ms to see replicas [{}] become active.", timer.getTime(), replicasToCheck); + } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java b/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java index cccf590c78c..76a3085d6b0 100644 --- a/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java +++ b/solr/core/src/test/org/apache/solr/cloud/KerberosTestServices.java @@ -78,7 +78,7 @@ public class KerberosTestServices { FileUtils.deleteDirectory(workDir); // clean directory numTries++; if (numTries == 3) { - log.error("Failed setting up MiniKDC. Tried " + numTries + " times."); + log.error("Failed setting up MiniKDC. Tried {} times.", numTries); throw e; } log.error("BindException encountered when setting up MiniKdc. 
Trying again."); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java index 9c3e83fc2ee..881b68a0a35 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java @@ -99,7 +99,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 { throws KeeperException, InterruptedException, IOException { super.runLeaderProcess(weAreReplacement, pauseBeforeStartMs); if (runLeaderDelay > 0) { - log.info("Sleeping for " + runLeaderDelay + "ms to simulate leadership takeover delay"); + log.info("Sleeping for {}ms to simulate leadership takeover delay", runLeaderDelay); Thread.sleep(runLeaderDelay); } } @@ -352,7 +352,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 { @Test public void testParallelElection() throws Exception { final int numShards = 2 + random().nextInt(18); - log.info("Testing parallel election across " + numShards + " shards"); + log.info("Testing parallel election across {} shards", numShards); List threads = new ArrayList<>(); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java index bde632f5ede..e94783e6a1c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java @@ -142,7 +142,7 @@ public class LeaderFailoverAfterPartitionTest extends HttpPartitionTest { if (oldLeaderProxy != null) { oldLeaderProxy.close(); } else { - log.warn("No SocketProxy found for old leader node "+leaderNode); + log.warn("No SocketProxy found for old leader node {}",leaderNode); } Thread.sleep(10000); // give chance for new leader to be elected. 
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java index b053743dabe..9c5ff4d5ef4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java @@ -102,7 +102,9 @@ public class LeaderTragicEventTest extends SolrCloudTestCase { for (String id : addedIds) { assertNotNull(cluster.getSolrClient().getById(collection,id)); } - log.info("The test success oldLeader:{} currentState:{}", oldLeader, getCollectionState(collection)); + if (log.isInfoEnabled()) { + log.info("The test success oldLeader:{} currentState:{}", oldLeader, getCollectionState(collection)); + } } finally { CollectionAdminRequest.deleteCollection(collection).process(cluster.getSolrClient()); @@ -178,7 +180,9 @@ public class LeaderTragicEventTest extends SolrCloudTestCase { if (numReplicas == 2) { Slice shard = getCollectionState(collection).getSlice("shard1"); otherReplicaJetty = cluster.getReplicaJetty(getNonLeader(shard)); - log.info("Stop jetty node : {} state:{}", otherReplicaJetty.getBaseUrl(), getCollectionState(collection)); + if (log.isInfoEnabled()) { + log.info("Stop jetty node : {} state:{}", otherReplicaJetty.getBaseUrl(), getCollectionState(collection)); + } otherReplicaJetty.stop(); cluster.waitForJettyToStop(otherReplicaJetty); waitForState("Timeout waiting for replica get down", collection, (liveNodes, collectionState) -> getNonLeader(collectionState.getSlice("shard1")).getState() != Replica.State.ACTIVE); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java index 3dfb521d629..08bc9abd83d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java @@ -92,7 +92,9 @@ public class LeaderVoteWaitTimeoutTest extends SolrCloudTestCase { cluster.stopJettySolrRunner(jetty);// TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); - log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl()); + if (log.isInfoEnabled()) { + log.info("Adding proxy for URL: {}. 
Proxy {}", jetty.getBaseUrl(), proxy.getUrl()); + } proxies.put(jetty, proxy); jettys.put(proxy.getUrl(), jetty); } diff --git a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java index 85e13d15fd8..d5439d16578 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java @@ -153,14 +153,14 @@ public class MigrateRouteKeyTest extends SolrCloudTestCase { cluster.getSolrClient().deleteById("a/" + BIT_SEP + "!104"); splitKeyCount[0]--; } catch (Exception e) { - log.warn("Error deleting document a/" + BIT_SEP + "!104", e); + log.warn("Error deleting document a/{}!104", BIT_SEP, e); } cluster.getSolrClient().commit(); collectionClient.commit(); solrQuery = new SolrQuery("*:*").setRows(1000); QueryResponse response = collectionClient.query(solrQuery); - log.info("Response from target collection: " + response); + log.info("Response from target collection: {}", response); assertEquals("DocCount on target collection does not match", splitKeyCount[0], response.getResults().getNumFound()); waitForState("Expected to find routing rule for split key " + splitKey, "sourceCollection", (n, c) -> { @@ -208,7 +208,7 @@ public class MigrateRouteKeyTest extends SolrCloudTestCase { if (splitKey.equals(shardKey)) splitKeyCount++; } catch (Exception e) { - log.error("Exception while adding document id: " + doc.getField("id"), e); + log.error("Exception while adding document id: {}", doc.getField("id"), e); } try { Thread.sleep(50); diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java index 025460c895b..a17cd1a25a1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java @@ -95,7 +95,9 @@ public class MoveReplicaTest extends SolrCloudTestCase { // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void test() throws Exception { String coll = getTestClass().getSimpleName() + "_coll_" + inPlaceMove; - log.info("total_jettys: " + cluster.getJettySolrRunners().size()); + if (log.isInfoEnabled()) { + log.info("total_jettys: {}", cluster.getJettySolrRunners().size()); + } int REPLICATION = 2; CloudSolrClient cloudClient = cluster.getSolrClient(); @@ -154,7 +156,7 @@ public class MoveReplicaTest extends SolrCloudTestCase { boolean recovered = false; for (int i = 0; i < 300; i++) { DocCollection collState = getCollectionState(coll); - log.debug("###### " + collState); + log.debug("###### {}", collState); Collection replicas = collState.getSlice(shardId).getReplicas(); boolean allActive = true; boolean hasLeaders = true; @@ -164,7 +166,7 @@ public class MoveReplicaTest extends SolrCloudTestCase { continue; } if (!r.isActive(Collections.singleton(targetNode))) { - log.info("Not active: " + r); + log.info("Not active: {}", r); allActive = false; } } @@ -182,7 +184,7 @@ public class MoveReplicaTest extends SolrCloudTestCase { recovered = true; break; } else { - log.info("--- waiting, allActive=" + allActive + ", hasLeaders=" + hasLeaders); + log.info("--- waiting, allActive={}, hasLeaders={}", allActive, hasLeaders); Thread.sleep(1000); } } @@ -198,7 +200,7 @@ public class MoveReplicaTest extends SolrCloudTestCase { recovered = false; for (int i = 0; i < 300; i++) { DocCollection collState = 
getCollectionState(coll); - log.debug("###### " + collState); + log.debug("###### {}", collState); Collection replicas = collState.getSlice(shardId).getReplicas(); boolean allActive = true; boolean hasLeaders = true; @@ -208,7 +210,7 @@ public class MoveReplicaTest extends SolrCloudTestCase { continue; } if (!r.isActive(Collections.singleton(replica.getNodeName()))) { - log.info("Not active yet: " + r); + log.info("Not active yet: {}", r); allActive = false; } } @@ -301,7 +303,9 @@ public class MoveReplicaTest extends SolrCloudTestCase { } assertFalse(success); - log.info("--- current collection state: " + cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + if (log.isInfoEnabled()) { + log.info("--- current collection state: {}", cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + } assertEquals(100, cluster.getSolrClient().query(coll, new SolrQuery("*:*")).getResults().getNumFound()); } diff --git a/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java index 873480fbab7..6a5187cdcc4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java @@ -58,11 +58,13 @@ public class OutOfBoxZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); zkDir = createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new ZkTestServer(zkDir); zkServer.run(); @@ -80,7 +82,9 @@ public class OutOfBoxZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { zkClient.create(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); zkClient.close(); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } @Override @@ -123,7 +127,7 @@ public class OutOfBoxZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { protected void assertOpenACLUnsafeAllover(SolrZkClient zkClient, String path, List verifiedList) throws Exception { List acls = zkClient.getSolrZooKeeper().getACL(path, new Stat()); if (log.isInfoEnabled()) { - log.info("Verifying " + path); + log.info("Verifying {}", path); } if (ZooDefs.CONFIG_NODE.equals(path)) { // Treat this node specially, from the ZK docs: diff --git a/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java index 4c299f4d45c..5f2112bac80 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java @@ -63,11 +63,13 @@ public class OverriddenZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); zkDir =createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + 
log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new ZkTestServer(zkDir); zkServer.run(false); @@ -92,7 +94,9 @@ public class OverriddenZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); zkClient.close(); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } @Override diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java index 66600153e43..fc60b5db128 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java @@ -745,7 +745,9 @@ public class OverseerCollectionConfigSetProcessorTest extends SolrTestCaseJ4 { overseerMock, completedMapMock, failureMapMock); - log.info("clusterstate " + clusterStateMock.hashCode()); + if (log.isInfoEnabled()) { + log.info("clusterstate {}", clusterStateMock.hashCode()); + } startComponentUnderTest(); diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java index bcfaeda852f..99c40640505 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java @@ -76,7 +76,7 @@ public class OverseerRolesTest extends SolrCloudTestCase { } private void waitForNewOverseer(int seconds, String expected, boolean failOnIntermediateTransition) throws Exception { - log.info("Expecting node: "+expected); + log.info("Expecting node: {}", expected); waitForNewOverseer(seconds, s -> Objects.equals(s, expected), failOnIntermediateTransition); } @@ -97,8 +97,10 @@ public class OverseerRolesTest extends SolrCloudTestCase { } private void logOverseerState() throws KeeperException, InterruptedException { - log.info("Overseer: {}", getLeaderNode(zkClient())); - log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); + if (log.isInfoEnabled()) { + log.info("Overseer: {}", getLeaderNode(zkClient())); + log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // logOk + } } @Test @@ -195,7 +197,7 @@ public class OverseerRolesTest extends SolrCloudTestCase { logOverseerState(); // kill the current overseer, and check that the next node in the election queue assumes leadership leaderJetty.stop(); - log.info("Killing designated overseer: "+overseer1); + log.info("Killing designated overseer: {}", overseer1); // after 5 seconds, bring back dead designated overseer and assert that it assumes leadership "right away", // i.e. without any other node assuming leadership before this node becomes leader. 
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java index 226b5dbac64..255d19933dc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java @@ -558,7 +558,8 @@ public class OverseerTest extends SolrTestCaseJ4 { } } - log.warn("Timeout waiting for collections: " + Arrays.asList(collections) + " state:" + stateReader.getClusterState()); + log.warn("Timeout waiting for collections: {} state: {}" + , Arrays.asList(collections), stateReader.getClusterState()); } @Test @@ -1163,7 +1164,9 @@ public class OverseerTest extends SolrTestCaseJ4 { if (Arrays.binarySearch(interestingOps, op) < 0) continue; Stats.Stat stat = entry.getValue(); - log.info("op: {}, success: {}, failure: {}", op, stat.success.get(), stat.errors.get()); + if (log.isInfoEnabled()) { + log.info("op: {}, success: {}, failure: {}", op, stat.success.get(), stat.errors.get()); + } Timer timer = stat.requestTime; printTimingStats(timer); } @@ -1177,15 +1180,17 @@ public class OverseerTest extends SolrTestCaseJ4 { private void printTimingStats(Timer timer) { Snapshot snapshot = timer.getSnapshot(); - log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate()); - log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); - log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); - log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); - log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); - log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); - log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); - log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); - log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); + if (log.isInfoEnabled()) { + log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate()); + log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); // logOk + log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); // logOk + log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); // logOk + log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); // logOk + log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); // logOk + log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); // logOk + log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); // logOk + log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); // logOk + } } private static long nsToMs(double ns) { diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java index 6778a155845..843e05c65cb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeNoTargetTest.java @@ -60,7 +60,9 @@ public class ReplaceNodeNoTargetTest extends SolrCloudTestCase { @LuceneTestCase.AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-11067") public void test() throws Exception { String coll = "replacenodetest_coll_notarget"; - log.info("total_jettys: " + cluster.getJettySolrRunners().size()); + if (log.isInfoEnabled()) { + log.info("total_jettys: {}", cluster.getJettySolrRunners().size()); + } CloudSolrClient cloudClient = cluster.getSolrClient(); Set 
liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes(); @@ -79,11 +81,13 @@ public class ReplaceNodeNoTargetTest extends SolrCloudTestCase { cloudClient.request(create); cluster.waitForActiveCollection(coll, 5, 10); - log.info("Current core status list for node we plan to decommision: {} => {}", - node2bdecommissioned, - getCoreStatusForNamedNode(cloudClient, node2bdecommissioned).getCoreStatus()); - - log.info("Decommisioning node: " + node2bdecommissioned); + if (log.isInfoEnabled()) { + log.info("Current core status list for node we plan to decommision: {} => {}", + node2bdecommissioned, + getCoreStatusForNamedNode(cloudClient, node2bdecommissioned).getCoreStatus()); + log.info("Decommisioning node: {}", node2bdecommissioned); + } + createReplaceNodeRequest(node2bdecommissioned, null, null).processAsync("001", cloudClient); CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("001"); boolean success = false; diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java index 0412330b1cd..b60c8508c2e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java @@ -60,7 +60,9 @@ public class ReplaceNodeTest extends SolrCloudTestCase { @Test public void test() throws Exception { String coll = "replacenodetest_coll"; - log.info("total_jettys: " + cluster.getJettySolrRunners().size()); + if (log.isInfoEnabled()) { + log.info("total_jettys: {}", cluster.getJettySolrRunners().size()); + } CloudSolrClient cloudClient = cluster.getSolrClient(); Set liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes(); @@ -89,7 +91,7 @@ public class ReplaceNodeTest extends SolrCloudTestCase { cluster.waitForActiveCollection(coll, 5, 5 * (create.getNumNrtReplicas() + create.getNumPullReplicas() + create.getNumTlogReplicas())); DocCollection collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); - log.debug("### Before decommission: " + collection); + log.debug("### Before decommission: {}", collection); log.info("excluded_node : {} ", emptyNode); createReplaceNodeRequest(node2bdecommissioned, emptyNode, null).processAsync("000", cloudClient); CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("000"); @@ -111,13 +113,13 @@ public class ReplaceNodeTest extends SolrCloudTestCase { Thread.sleep(5000); collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); - log.debug("### After decommission: " + collection); + log.debug("### After decommission: {}", collection); // check what are replica states on the decommissioned node List replicas = collection.getReplicas(node2bdecommissioned); if (replicas == null) { replicas = Collections.emptyList(); } - log.debug("### Existing replicas on decommissioned node: " + replicas); + log.debug("### Existing replicas on decommissioned node: {}", replicas); //let's do it back - this time wait for recoveries CollectionAdminRequest.AsyncCollectionAdminRequest replaceNodeRequest = createReplaceNodeRequest(emptyNode, node2bdecommissioned, Boolean.TRUE); diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java index 3fc3580d2ea..b4e7e286b83 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java @@ -94,8 +94,10 @@ public class ReplicationFactorTest extends AbstractFullDistribZkTestBase { testRf2NotUsingDirectUpdates(); waitForThingsToLevelOut(30, TimeUnit.SECONDS); - log.info("replication factor testing complete! final clusterState is: "+ - cloudClient.getZkStateReader().getClusterState()); + if (log.isInfoEnabled()) { + log.info("replication factor testing complete! final clusterState is: {}", + cloudClient.getZkStateReader().getClusterState()); + } } protected void testRf2NotUsingDirectUpdates() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java index 3a0d7311dc9..a006d94335a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java @@ -83,7 +83,7 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase { boolean sawLiveDesignate = false; int numRestarts = 1 + random().nextInt(TEST_NIGHTLY ? 12 : 2); for (int i = 0; i < numRestarts; i++) { - log.info("Rolling restart #{}", i + 1); + log.info("Rolling restart #{}", i + 1); // logOk for (CloudJettyRunner cloudJetty : designateJettys) { log.info("Restarting {}", cloudJetty); chaosMonkey.stopJetty(cloudJetty); @@ -95,8 +95,8 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase { if (!success) { leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); if (leader == null) - log.error("NOOVERSEER election queue is :" + - OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), + log.error("NOOVERSEER election queue is : {}" + , OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), "/overseer_elect/election")); fail("No overseer designate as leader found after restart #" + (i + 1) + ": " + leader); } @@ -106,8 +106,8 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase { if (!success) { leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); if (leader == null) - log.error("NOOVERSEER election queue is :" + - OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), + log.error("NOOVERSEER election queue is :{}" + , OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), "/overseer_elect/election")); fail("No overseer leader found after restart #" + (i + 1) + ": " + leader); } diff --git a/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java b/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java index 4e3d62e30e2..aaeb9a9d5bc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java @@ -69,11 +69,13 @@ public class SaslZkACLProviderTest extends SolrTestCaseJ4 { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); Path zkDir = createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new SaslZkTestServer(zkDir, createTempDir().resolve("miniKdc")); 
zkServer.run(); @@ -84,7 +86,9 @@ public class SaslZkACLProviderTest extends SolrTestCaseJ4 { } setupZNodes(); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } protected void setupZNodes() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java index 289fb3e42f4..f86284a5797 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java @@ -98,7 +98,7 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase { SolrCLI.CreateCollectionTool tool = new SolrCLI.CreateCollectionTool(); CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); - log.info("Creating the '"+testCollectionName+"' collection using SolrCLI with: "+solrUrl); + log.info("Creating the '{}' collection using SolrCLI with: {}", testCollectionName, solrUrl); tool.runTool(cli); assertTrue("Collection '" + testCollectionName + "' doesn't exist after trying to create it!", cloudClient.getZkStateReader().getClusterState().hasCollection(testCollectionName)); @@ -140,7 +140,9 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase { expectedXmlFileCount, xmlFiles.size()); for (File xml : xmlFiles) { - log.info("POSTing "+xml.getAbsolutePath()); + if (log.isInfoEnabled()) { + log.info("POSTing {}", xml.getAbsolutePath()); + } cloudClient.request(new StreamingUpdateRequest("/update",xml,"application/xml")); } cloudClient.commit(); @@ -156,14 +158,14 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase { } assertEquals("*:* found unexpected number of documents", expectedXmlDocCount, numFound); - log.info("Updating Config for " + testCollectionName); + log.info("Updating Config for {}", testCollectionName); doTestConfigUpdate(testCollectionName, solrUrl); - log.info("Running healthcheck for " + testCollectionName); + log.info("Running healthcheck for {}", testCollectionName); doTestHealthcheck(testCollectionName, cloudClient.getZkHost()); // verify the delete action works too - log.info("Running delete for "+testCollectionName); + log.info("Running delete for {}", testCollectionName); doTestDeleteAction(testCollectionName, solrUrl); log.info("testLoadDocsIntoGettingStartedCollection succeeded ... 
shutting down now!"); @@ -218,7 +220,7 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase { SolrCLI.ConfigTool tool = new SolrCLI.ConfigTool(); CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); - log.info("Sending set-property '" + prop + "'=" + maxTime + " to SolrCLI.ConfigTool."); + log.info("Sending set-property '{}'={} to SolrCLI.ConfigTool.", prop, maxTime); assertTrue("Set config property failed!", tool.runTool(cli) == 0); configJson = SolrCLI.getJson(configUrl); @@ -234,7 +236,9 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase { assertEquals("Should have been able to get a value from the /query request handler", "explicit", SolrCLI.atPath("/config/requestHandler/\\/query/defaults/echoParams", configJson)); - log.info("live_nodes_count : " + cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + if (log.isInfoEnabled()) { + log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + } // Since it takes some time for this command to complete we need to make sure all the reloads for // all the cores have been done. diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java index e8d0e92bad0..8e1358c9fb8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java @@ -81,7 +81,9 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 { zkClient.close(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } // set some system properties for use by tests Properties props = new Properties(); @@ -89,7 +91,9 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 { props.setProperty("solr.test.sys.prop2", "proptwo"); cfg = SolrDispatchFilter.loadNodeConfig(solrHome, props); - log.info("####SETUP_END " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_END {}", getTestName()); + } } private void closeZK() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java index 9d780d310a9..98240e6c250 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java @@ -181,7 +181,7 @@ public class SplitShardTest extends SolrCloudTestCase { long numFound = 0; try { numFound = replicaClient.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound(); - log.info("Replica count=" + numFound + " for " + replica); + log.info("Replica count={} for {}", numFound, replica); } finally { replicaClient.close(); } @@ -269,11 +269,11 @@ public class SplitShardTest extends SolrCloudTestCase { String id = (String) doc.get("id"); leftover.remove(id); } - log.error("MISSING DOCUMENTS: " + leftover); + log.error("MISSING DOCUMENTS: {}", leftover); } assertEquals("Documents are missing!", docsIndexed.get(), numDocs); - log.info("Number of documents indexed and queried : " + numDocs); + log.info("Number of documents indexed and queried : {}", numDocs); } diff --git a/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java b/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java index e78f3eaf21d..bede77537d5 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java 
+++ b/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java @@ -130,7 +130,7 @@ public class SystemCollectionCompatTest extends SolrCloudTestCase { long currentTime = getCoreStatus(r).getCoreStartTime().getTime(); allReloaded = allReloaded && (previousTime < currentTime); } catch (Exception e) { - log.warn("Error retrieving replica status of " + Utils.toJSONString(r), e); + log.warn("Error retrieving replica status of {}", Utils.toJSONString(r), e); allReloaded = false; } } @@ -185,7 +185,9 @@ public class SystemCollectionCompatTest extends SolrCloudTestCase { watcher.reset(); // restart Overseer to trigger the back-compat check - log.info("Stopping Overseer Node: {} ({})", overseerNode.getNodeName(), overseerNode.getLocalPort()); + if (log.isInfoEnabled()) { + log.info("Stopping Overseer Node: {} ({})", overseerNode.getNodeName(), overseerNode.getLocalPort()); + } cluster.stopJettySolrRunner(overseerNode); log.info("Waiting for new overseer election..."); TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, cloudManager.getTimeSource()); @@ -217,11 +219,15 @@ public class SystemCollectionCompatTest extends SolrCloudTestCase { continue; } if (doc.getFieldValue("message").toString().contains("re-indexing")) { - log.info("Found re-indexing message: {}", doc.getFieldValue("message")); + if (log.isInfoEnabled()) { + log.info("Found re-indexing message: {}", doc.getFieldValue("message")); + } foundWarning = true; } if (doc.getFieldValue("message").toString().contains("timestamp")) { - log.info("Found timestamp message: {}", doc.getFieldValue("message")); + if (log.isInfoEnabled()) { + log.info("Found timestamp message: {}", doc.getFieldValue("message")); + } foundSchemaWarning = true; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java b/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java index c3635fb77c2..c7b197a8eb9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java @@ -149,7 +149,7 @@ public class TestAuthenticationFramework extends SolrCloudTestCase { String username = httpRequest.getHeader("username"); String password = httpRequest.getHeader("password"); - log.info("Username: "+username+", password: "+password); + log.info("Username: {}, password: {}", username, password); if(MockAuthenticationPlugin.expectedUsername.equals(username) && MockAuthenticationPlugin.expectedPassword.equals(password)) { filterChain.doFilter(request, response); return true; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java index 9d04f99c629..916836861cf 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java @@ -69,7 +69,9 @@ public class TestCloudConsistency extends SolrCloudTestCase { cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); - log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl()); + if (log.isInfoEnabled()) { + log.info("Adding proxy for URL: {}. 
Proxy: {}", jetty.getBaseUrl(), proxy.getUrl()); + } proxies.put(jetty, proxy); jettys.put(proxy.getUrl(), jetty); } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java index e5fef491b38..64f3466941b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java @@ -362,7 +362,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase { // no stats for this pivot, nothing to check // TODO: use a trace param to know if/how-many to expect ? - log.info("No stats to check for => " + message); + log.info("No stats to check for => {}", message); return; } @@ -380,7 +380,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase { // StatsComponent results being "null" (and not even included in the // getFieldStatsInfo() Map due to specila SolrJ logic) - log.info("Requested stats missing in verification query, pivot stats: " + pivotStats); + log.info("Requested stats missing in verification query, pivot stats: {}", pivotStats); assertEquals("Special Count", 0L, pivotStats.getCount().longValue()); assertEquals("Special Missing", constraint.getCount(), pivotStats.getMissing().longValue()); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java index 70680c250a9..54bd9b7e2f1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java @@ -259,7 +259,9 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase { log.info("Active replica: {}", coreNodeName); for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) { JettySolrRunner jettySolrRunner = cluster.getJettySolrRunner(i); - log.info("Checking node: {}", jettySolrRunner.getNodeName()); + if (log.isInfoEnabled()) { + log.info("Checking node: {}", jettySolrRunner.getNodeName()); + } if (jettySolrRunner.getNodeName().equals(replica.getNodeName())) { SolrDispatchFilter solrDispatchFilter = jettySolrRunner.getSolrDispatchFilter(); try (SolrCore core = solrDispatchFilter.getCores().getCore(coreName)) { @@ -268,7 +270,9 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase { assert false; return false; } - log.info("Found SolrCore: {}, id: {}", core.getName(), core); + if (log.isInfoEnabled()) { + log.info("Found SolrCore: {}, id: {}", core.getName(), core); + } RefCounted registeredSearcher = core.getRegisteredSearcher(); if (registeredSearcher != null) { log.error("registered searcher not null, maxdocs = {}", registeredSearcher.get().maxDoc()); @@ -326,13 +330,19 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase { if (sleepTime.get() > 0) { TestCloudSearcherWarming.coreNodeNameRef.set(newSearcher.getCore().getCoreDescriptor().getCloudDescriptor().getCoreNodeName()); TestCloudSearcherWarming.coreNameRef.set(newSearcher.getCore().getName()); - log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + if (log.isInfoEnabled()) { + log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}" + , sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + } 
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java
index 70680c250a9..54bd9b7e2f1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java
@@ -259,7 +259,9 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase {
     log.info("Active replica: {}", coreNodeName);
     for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
       JettySolrRunner jettySolrRunner = cluster.getJettySolrRunner(i);
-      log.info("Checking node: {}", jettySolrRunner.getNodeName());
+      if (log.isInfoEnabled()) {
+        log.info("Checking node: {}", jettySolrRunner.getNodeName());
+      }
       if (jettySolrRunner.getNodeName().equals(replica.getNodeName())) {
         SolrDispatchFilter solrDispatchFilter = jettySolrRunner.getSolrDispatchFilter();
         try (SolrCore core = solrDispatchFilter.getCores().getCore(coreName)) {
@@ -268,7 +270,9 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase {
             assert false;
             return false;
           }
-          log.info("Found SolrCore: {}, id: {}", core.getName(), core);
+          if (log.isInfoEnabled()) {
+            log.info("Found SolrCore: {}, id: {}", core.getName(), core);
+          }
           RefCounted registeredSearcher = core.getRegisteredSearcher();
           if (registeredSearcher != null) {
             log.error("registered searcher not null, maxdocs = {}", registeredSearcher.get().maxDoc());
@@ -326,13 +330,19 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase {
       if (sleepTime.get() > 0) {
         TestCloudSearcherWarming.coreNodeNameRef.set(newSearcher.getCore().getCoreDescriptor().getCloudDescriptor().getCoreNodeName());
         TestCloudSearcherWarming.coreNameRef.set(newSearcher.getCore().getName());
-        log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
+        if (log.isInfoEnabled()) {
+          log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}"
+              , sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
+        }
         try {
           Thread.sleep(sleepTime.get());
         } catch (InterruptedException e) {
           log.warn("newSearcher was interupdated", e);
         }
-        log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
+        if (log.isInfoEnabled()) {
+          log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}"
+              , sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
+        }
       }
     }
   }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
index 6939aa232e3..73c1dc9224d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
@@ -500,10 +500,14 @@ public class TestConfigSetsAPI extends SolrTestCaseJ4 {
         File.separator + TestUtil.randomSimpleString(random(), 6, 8) + ".zip");
     File directory = TestDynamicLoading.getFile(directoryPath);
-    log.info("Directory: "+directory.getAbsolutePath());
+    if (log.isInfoEnabled()) {
+      log.info("Directory: {}", directory.getAbsolutePath());
+    }
     try {
       zip (directory, zipFile);
-      log.info("Zipfile: "+zipFile.getAbsolutePath());
+      if (log.isInfoEnabled()) {
+        log.info("Zipfile: {}", zipFile.getAbsolutePath());
+      }
       return zipFile.getAbsolutePath();
     } catch (IOException e) {
       throw new RuntimeException(e);
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java b/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java
index e6048e15312..207e255e297 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java
@@ -64,7 +64,7 @@ public class TestDynamicFieldNamesIndexCorrectly extends AbstractFullDistribZkTe
   void populateIndex(int numRuns) throws IOException, SolrServerException {
     try {
       for (int i = 0; i < numRuns; i++) {
-        log.debug("Iteration number: " + i);
+        log.debug("Iteration number: {}", i);
         cloudClient.deleteByQuery(COLLECTION, "*:*");
         cloudClient.commit(COLLECTION);
@@ -74,7 +74,7 @@ public class TestDynamicFieldNamesIndexCorrectly extends AbstractFullDistribZkTe
         final SolrQuery solrQuery = new SolrQuery("*:*");
         solrQuery.setRows(solrDocs.size());
         final SolrDocumentList resultDocs = getSolrResponse(solrQuery, COLLECTION);
-        log.debug(resultDocs.toString());
+        log.debug("{}", resultDocs);
         assertThatDocsHaveCorrectFields(solrDocs, resultDocs);
       }
     } finally {
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java
index c085b2876a8..6bac5e84355 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java
@@ -77,7 +77,9 @@ public class TestLeaderElectionZkExpiry extends SolrTestCaseJ4 {
         try {
          String leaderNode = OverseerCollectionConfigSetProcessor.getLeaderNode(zc);
           if (leaderNode != null && !leaderNode.trim().isEmpty()) {
-            log.info("Time={} Overseer leader is = {}", System.nanoTime(), leaderNode);
+            if (log.isInfoEnabled()) {
+              log.info("Time={} Overseer leader is = {}", System.nanoTime(), leaderNode);
+            }
             found = true;
             break;
           }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
index 50404be5d63..f0af1445570 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
@@ -334,13 +334,17 @@ public class TestPullReplica extends SolrCloudTestCase {
         return false;
       }
       statesSeen.add(r.getState());
-      log.info("CollectionStateWatcher saw state: {}", r.getState());
+      if (log.isInfoEnabled()) {
+        log.info("CollectionStateWatcher saw state: {}", r.getState());
+      }
       return r.getState() == Replica.State.ACTIVE;
     });
     CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.PULL).process(cluster.getSolrClient());
     waitForState("Replica not added", collectionName, activeReplicaCount(1, 0, 1));
     zkClient().printLayoutToStream(System.out);
-    log.info("Saw states: " + Arrays.toString(statesSeen.toArray()));
+    if (log.isInfoEnabled()) {
+      log.info("Saw states: {}", Arrays.toString(statesSeen.toArray()));
+    }
     assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), 3, statesSeen.size());
     assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.DOWN, statesSeen.get(0));
     assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.RECOVERING, statesSeen.get(0));
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
index a44958913fe..6a79c613f40 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
@@ -85,7 +85,9 @@ public class TestPullReplicaErrorHandling extends SolrCloudTestCase {
       cluster.startJettySolrRunner(jetty);
       cluster.waitForAllNodes(30);
       proxy.open(jetty.getBaseUrl().toURI());
-      log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
+      if (log.isInfoEnabled()) {
+        log.info("Adding proxy for URL: {}. Proxy: {}", jetty.getBaseUrl(), proxy.getUrl());
+      }
       proxies.put(proxy.getUrl(), proxy);
       jettys.put(proxy.getUrl(), jetty);
     }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java
index 703f02d6bde..9ecc474e204 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java
@@ -119,7 +119,7 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
     long expectedTotalRequests = 0;
     Set uniqueCoreNames = new LinkedHashSet<>();
-    log.info("Making requests to " + baseUrl + "a1x2");
+    log.info("Making requests to {} a1x2", baseUrl);
     while (uniqueCoreNames.size() < counters.keySet().size() && expectedTotalRequests < 1000L) {
       expectedTotalRequests++;
       client.query(new SolrQuery("*:*"));
@@ -135,7 +135,7 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
       assertEquals("Sanity Check: Num Queries So Far Doesn't Match Total????", expectedTotalRequests, actualTotalRequests);
     }
-    log.info("Total requests: " + expectedTotalRequests);
+    log.info("Total requests: {}", expectedTotalRequests);
     assertEquals("either request randomization code is broken of this test seed is really unlucky, " +
         "Gave up waiting for requests to hit every core at least once after " + expectedTotalRequests + " requests",
@@ -179,7 +179,9 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
         ZkStateReader.ROLES_PROP, "",
         ZkStateReader.STATE_PROP, Replica.State.DOWN.toString());
-    log.info("Forcing {} to go into 'down' state", notLeader.getStr(ZkStateReader.CORE_NAME_PROP));
+    if (log.isInfoEnabled()) {
+      log.info("Forcing {} to go into 'down' state", notLeader.getStr(ZkStateReader.CORE_NAME_PROP));
+    }
     ZkDistributedQueue q = jettys.get(0).getCoreContainer().getZkController().getOverseer().getStateUpdateQueue();
     q.offer(Utils.toJSON(m));
@@ -190,7 +192,7 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
     String baseUrl = notLeader.getStr(ZkStateReader.BASE_URL_PROP);
     if (!baseUrl.endsWith("/")) baseUrl += "/";
     String path = baseUrl + "football";
-    log.info("Firing queries against path=" + path);
+    log.info("Firing queries against path={}", path);
     try (HttpSolrClient client = getHttpSolrClient(path, 2000, 5000)) {
       SolrCore leaderCore = null;
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java
index 3daaa6485a9..2923211109c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithKerberosAlt.java
@@ -97,7 +97,7 @@ public class TestSolrCloudWithKerberosAlt extends SolrCloudTestCase {
     System.setProperty("solr.kerberos.keytab", keytabFile.getAbsolutePath());
     System.setProperty("authenticationPlugin", "org.apache.solr.security.KerberosPlugin");
     boolean enableDt = random().nextBoolean();
-    log.info("Enable delegation token: " + enableDt);
+    log.info("Enable delegation token: {}", enableDt);
     System.setProperty("solr.kerberos.delegation.token.enabled", Boolean.toString(enableDt));
     // Extracts 127.0.0.1 from HTTP/127.0.0.1@EXAMPLE.COM
     System.setProperty("solr.kerberos.name.rules", "RULE:[1:$1@$0](.*EXAMPLE.COM)s/@.*//"
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java
index dd107f8e32e..15d7c46aa23 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java
@@ -270,7 +270,8 @@ public class TestStressCloudBlindAtomicUpdates extends SolrCloudTestCase {
     final int numDocsInIndex = (numDocsToCheck * DOC_ID_INCR);
     final AtomicLong[] expected = new AtomicLong[numDocsToCheck];
-    log.info("Testing " + numericFieldName + ": numDocsToCheck=" + numDocsToCheck + ", numDocsInIndex=" + numDocsInIndex + ", incr=" + DOC_ID_INCR);
+    log.info("Testing {}: numDocsToCheck={}, numDocsInIndex={}, incr={}"
+        , numericFieldName, numDocsToCheck, numDocsInIndex, DOC_ID_INCR);
     // seed the index & keep track of what docs exist and with what values
     for (int id = 0; id < numDocsInIndex; id++) {
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java b/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java
index 01314d3ea68..4230ec1fb3a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java
@@ -134,11 +134,13 @@ public class TestStressInPlaceUpdates extends AbstractFullDistribZkTestBase {
         int fullUpdatePercent = 20;
     **/
-    log.info("{}", Arrays.asList
-        ("commitPercent", commitPercent, "softCommitPercent", softCommitPercent,
+    if (log.isInfoEnabled()) {
+      log.info("{}", Arrays.asList
+          ("commitPercent", commitPercent, "softCommitPercent", softCommitPercent,
             "deletePercent", deletePercent, "deleteByQueryPercent", deleteByQueryPercent,
             "ndocs", ndocs, "nWriteThreads", nWriteThreads, "percentRealtimeQuery", percentRealtimeQuery,
             "operations", operations, "nReadThreads", nReadThreads));
+    }
     initModel(ndocs);
@@ -218,8 +220,8 @@ public class TestStressInPlaceUpdates extends AbstractFullDistribZkTestBase {
                 try {
                   returnedVersion = deleteDocAndGetVersion(Integer.toString(id), params("_version_", Long.toString(info.version)), dbq);
-                  log.info(delType + ": Deleting id=" + id + ", version=" + info.version
-                      + ". Returned version=" + returnedVersion);
+                  log.info("{}: Deleting id={}, version={}. Returned version={}"
+                      , delType, id, info.version, returnedVersion);
                 } catch (RuntimeException e) {
                   if (e.getMessage() != null && e.getMessage().contains("version conflict")
                       || e.getMessage() != null && e.getMessage().contains("Conflict")) {
@@ -254,7 +256,8 @@ public class TestStressInPlaceUpdates extends AbstractFullDistribZkTestBase {
                 nextVal2 = nextVal1 * 1000000000l;
                 try {
                   returnedVersion = addDocAndGetVersion("id", id, "title_s", "title" + id, "val1_i_dvo", nextVal1, "val2_l_dvo", nextVal2, "_version_", info.version);
-                  log.info("FULL: Writing id=" + id + ", val=[" + nextVal1 + "," + nextVal2 + "], version=" + info.version + ", Prev was=[" + val1 + "," + val2 + "]. Returned version=" + returnedVersion);
+                  log.info("FULL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. Returned version={}"
+                      ,id, nextVal1, nextVal2, info.version, val1, val2, returnedVersion);
                 } catch (RuntimeException e) {
                   if (e.getMessage() != null && e.getMessage().contains("version conflict")
@@ -271,7 +274,8 @@ public class TestStressInPlaceUpdates extends AbstractFullDistribZkTestBase {
                 nextVal2 = val2 + val1;
                 try {
                   returnedVersion = addDocAndGetVersion("id", id, "val2_l_dvo", map("inc", String.valueOf(val1)), "_version_", info.version);
-                  log.info("PARTIAL: Writing id=" + id + ", val=[" + nextVal1 + "," + nextVal2 + "], version=" + info.version + ", Prev was=[" + val1 + "," + val2 + "]. Returned version=" + returnedVersion);
+                  log.info("PARTIAL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. Returned version={}"
+                      ,id, nextVal1, nextVal2, info.version, val1, val2, returnedVersion);
                 } catch (RuntimeException e) {
                   if (e.getMessage() != null && e.getMessage().contains("version conflict")
                       || e.getMessage() != null && e.getMessage().contains("Conflict")) {
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java
index b97ae0abab8..06be968d124 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java
@@ -112,8 +112,10 @@ public class TestStressLiveNodes extends SolrCloudTestCase {
     for (int i = 0; i < 10; i++) {
       result = new ArrayList<>(CLOUD_CLIENT.getZkStateReader().getClusterState().getLiveNodes());
       if (expectedCount != result.size()) {
-        log.info("sleeping #{} to give watchers a chance to finish: {} != {}",
-            i, expectedCount, result.size());
+        if (log.isInfoEnabled()) {
+          log.info("sleeping #{} to give watchers a chance to finish: {} != {}",
+              i, expectedCount, result.size());
+        }
         Thread.sleep(200);
       } else {
         break;
@@ -235,7 +237,7 @@ public class TestStressLiveNodes extends SolrCloudTestCase {
             client.makePath(nodePath, CreateMode.EPHEMERAL, true);
             numAdded++;
           } catch (Exception e) {
-            log.error("failed to create: " + nodePath, e);
+            log.error("failed to create: {}", nodePath, e);
           }
         }
         return numAdded;
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java
index d1c4d226962..8b9f1cfd52b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java
@@ -88,7 +88,9 @@ public class TestTlogReplayVsRecovery extends SolrCloudTestCase {
       cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart
       cluster.startJettySolrRunner(jetty);
       proxy.open(jetty.getBaseUrl().toURI());
-      log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
+      if (log.isInfoEnabled()) {
+        log.info("Adding proxy for URL: {}. Proxy: {}", jetty.getBaseUrl(), proxy.getUrl());
+      }
       proxies.put(jetty, proxy);
       jettys.put(proxy.getUrl(), jetty);
     }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
index 57129c3ea10..8f7e27bd4d5 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
@@ -434,7 +434,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
         }
         log.error("Unsuccessful attempt to add replica. Attempt: {}/{}", i, maxAttempts);
       } catch (SolrException e) {
-        log.error("Exception while adding replica. Attempt: " + i + "/" + maxAttempts, e);
+        log.error("Exception while adding replica. Attempt: {}/{}", i, maxAttempts, e);
       }
     }
   }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java
index ef07a773b24..d91078fac82 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java
@@ -264,8 +264,10 @@ public class TestTolerantUpdateProcessorRandomCloud extends SolrCloudTestCase {
       final UpdateResponse rsp = req.process(client);
       assertUpdateTolerantErrors(client.toString() + " => " + expectedErrors.toString(), rsp,
                                  expectedErrors.toArray(new ExpectedErr[expectedErrors.size()]));
-
-      log.info("END ITER #{}, expecting #docs: {}", i, expectedDocIds.cardinality());
+
+      if (log.isInfoEnabled()) {
+        log.info("END ITER #{}, expecting #docs: {}", i, expectedDocIds.cardinality());
+      }
       assertEquals("post update commit failed?", 0, CLOUD_CLIENT.commit().getStatus());
@@ -273,7 +275,7 @@ public class TestTolerantUpdateProcessorRandomCloud extends SolrCloudTestCase {
         if (expectedDocIds.cardinality() == countDocs(CLOUD_CLIENT)) {
           break;
         }
-        log.info("sleeping to give searchers a chance to re-open #" + j);
+        log.info("sleeping to give searchers a chance to re-open #{}", j);
         Thread.sleep(200);
       }
@@ -289,7 +291,7 @@ public class TestTolerantUpdateProcessorRandomCloud extends SolrCloudTestCase {
       for (int b = x.nextSetBit(0); 0 <= b; b = x.nextSetBit(b+1)) {
         final boolean expectedBit = expectedDocIds.get(b);
         final boolean actualBit = actualDocIds.get(b);
-        log.error("bit #"+b+" mismatch: expected {} BUT actual {}", expectedBit, actualBit);
+        log.error("bit #{} mismatch: expected {} BUT actual {}", b, expectedBit, actualBit);
       }
       assertEquals(x.cardinality() + " mismatched bits",
          expectedDocIds.cardinality(), actualDocIds.cardinality());
- log.info("jettyX replicas prior to being blacklisted: {}", getReplicaList(coll, jettyX)); + if (log.isInfoEnabled()) { + log.info("jettyX replicas prior to being blacklisted: {}", getReplicaList(coll, jettyX)); + } String setClusterPolicyCommand = "{" + " 'set-cluster-policy': [" + @@ -111,8 +115,10 @@ public class TestUtilizeNode extends SolrCloudTestCase { " , 'replica':0}" + " ]" + "}"; - log.info("Setting new policy to blacklist jettyX ({}) port={}", - jettyX.getNodeName(), jettyX.getLocalPort()); + if (log.isInfoEnabled()) { + log.info("Setting new policy to blacklist jettyX ({}) port={}", + jettyX.getNodeName(), jettyX.getLocalPort()); + } SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand); NamedList response = cloudClient.request(req); assertEquals(req + " => " + response, @@ -123,9 +129,10 @@ public class TestUtilizeNode extends SolrCloudTestCase { cluster.waitForAllNodes(30); assertNoReplicas("jettyY should not yet be utilized: ", coll, jettyY); - - log.info("jettyX replicas prior to utilizing jettyY: {}", getReplicaList(coll, jettyX)); - log.info("Sending UTILIZE command for jettyY ({})", jettyY.getNodeName()); + if (log.isInfoEnabled()) { + log.info("jettyX replicas prior to utilizing jettyY: {}", getReplicaList(coll, jettyX)); + log.info("Sending UTILIZE command for jettyY ({})", jettyY.getNodeName()); // logOk + } cloudClient.request(new CollectionAdminRequest.UtilizeNode(jettyY.getNodeName())); assertSomeReplicas("jettyY should now be utilized: ", coll, jettyY); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java b/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java index 45dd428f237..1b820a4388c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java @@ -145,8 +145,10 @@ public class TestWaitForStateWithJettyShutdowns extends SolrTestCaseJ4 { } public boolean matches(Set liveNodes, DocCollection collectionState) { final boolean result = inner.matches(liveNodes, collectionState); - log.info("Predicate called: result={}, (pre)latch={}, liveNodes={}, state={}", - result, latch.getCount(), liveNodes, collectionState); + if (log.isInfoEnabled()) { + log.info("Predicate called: result={}, (pre)latch={}, liveNodes={}, state={}", + result, latch.getCount(), liveNodes, collectionState); + } latch.countDown(); return result; } diff --git a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java index a85f0f0c376..b9db03d8174 100644 --- a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java @@ -57,11 +57,13 @@ public class VMParamsZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { @Override public void setUp() throws Exception { super.setUp(); - log.info("####SETUP_START " + getTestName()); + if (log.isInfoEnabled()) { + log.info("####SETUP_START {}", getTestName()); + } createTempDir(); zkDir = createTempDir().resolve("zookeeper/server1/data"); - log.info("ZooKeeper dataDir:" + zkDir); + log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new ZkTestServer(zkDir); zkServer.run(false); @@ -92,7 +94,9 @@ public class VMParamsZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { 
diff --git a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java
index a85f0f0c376..b9db03d8174 100644
--- a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java
@@ -57,11 +57,13 @@ public class VMParamsZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    log.info("####SETUP_START " + getTestName());
+    if (log.isInfoEnabled()) {
+      log.info("####SETUP_START {}", getTestName());
+    }
     createTempDir();
     zkDir = createTempDir().resolve("zookeeper/server1/data");
-    log.info("ZooKeeper dataDir:" + zkDir);
+    log.info("ZooKeeper dataDir:{}", zkDir);
     zkServer = new ZkTestServer(zkDir);
     zkServer.run(false);
@@ -92,7 +94,9 @@ public class VMParamsZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 {
     zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false);
     zkClient.close();
-    log.info("####SETUP_END " + getTestName());
+    if (log.isInfoEnabled()) {
+      log.info("####SETUP_END {}", getTestName());
+    }
   }
   @Override
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java
index c9c4b87ab6f..b3a14dac97d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java
@@ -78,7 +78,9 @@ public class ZkCLITest extends SolrTestCaseJ4 {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    log.info("####SETUP_START " + getTestName());
+    if (log.isInfoEnabled()) {
+      log.info("####SETUP_START {}", getTestName());
+    }
     String exampleHome = SolrJettyTestBase.legacyExampleCollection1SolrHome();
@@ -86,7 +88,7 @@ public class ZkCLITest extends SolrTestCaseJ4 {
     solrHome = exampleHome;
     zkDir = tmpDir.resolve("zookeeper/server1/data");
-    log.info("ZooKeeper dataDir:" + zkDir);
+    log.info("ZooKeeper dataDir:{}", zkDir);
     zkServer = new ZkTestServer(zkDir);
     zkServer.run();
     System.setProperty("zkHost", zkServer.getZkAddress());
@@ -98,7 +100,9 @@ public class ZkCLITest extends SolrTestCaseJ4 {
     this.zkClient = new SolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT);
-    log.info("####SETUP_END " + getTestName());
+    if (log.isInfoEnabled()) {
+      log.info("####SETUP_END {}", getTestName());
+    }
   }
   @Test
diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java
index a0013e78303..9ed68583438 100644
--- a/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/BaseCdcrDistributedZkTest.java
@@ -294,7 +294,7 @@ public class BaseCdcrDistributedZkTest extends AbstractDistribZkTestBase {
   }
   protected void waitForCdcrStateReplication(String collection) throws Exception {
-    log.info("Wait for CDCR state to replicate - collection: " + collection);
+    log.info("Wait for CDCR state to replicate - collection: {}", collection);
     int cnt = 30;
     while (cnt > 0) {
@@ -321,7 +321,7 @@ public class BaseCdcrDistributedZkTest extends AbstractDistribZkTestBase {
       }
     }
-    log.info("CDCR state is identical across nodes - collection: " + collection);
+    log.info("CDCR state is identical across nodes - collection: {}", collection);
   }
   /**
@@ -485,7 +485,7 @@ public class BaseCdcrDistributedZkTest extends AbstractDistribZkTestBase {
       res = new CollectionAdminResponse();
       res.setResponse(client.request(request));
     } catch (Exception e) {
-      log.warn("Error while deleting the collection " + collectionName, e);
+      log.warn("Error while deleting the collection {}", collectionName, e);
       return new CollectionAdminResponse();
     } finally {
       client.close();
diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java
index 4c7f15f606a..fdd5317edf6 100644
--- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBidirectionalTest.java
@@ -49,10 +49,14 @@ public class CdcrBidirectionalTest extends SolrTestCaseJ4 {
     MiniSolrCloudCluster cluster2 = new MiniSolrCloudCluster(1, createTempDir("cdcr-cluster2"), buildJettyConfig("/solr"));
     MiniSolrCloudCluster cluster1 = new MiniSolrCloudCluster(1, createTempDir("cdcr-cluster1"), buildJettyConfig("/solr"));
     try {
-      log.info("cluster2 zkHost = " + cluster2.getZkServer().getZkAddress());
+      if (log.isInfoEnabled()) {
+        log.info("cluster2 zkHost = {}", cluster2.getZkServer().getZkAddress());
+      }
      System.setProperty("cdcr.cluster2.zkHost", cluster2.getZkServer().getZkAddress());
-      log.info("cluster1 zkHost = " + cluster1.getZkServer().getZkAddress());
+      if (log.isInfoEnabled()) {
+        log.info("cluster1 zkHost = {}", cluster1.getZkServer().getZkAddress());
+      }
       System.setProperty("cdcr.cluster1.zkHost", cluster1.getZkServer().getZkAddress());
@@ -89,7 +93,7 @@ public class CdcrBidirectionalTest extends SolrTestCaseJ4 {
         req.add(doc);
       }
       req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
-      log.info("Adding " + docs + " docs with commit=true, numDocs=" + numDocs_c1);
+      log.info("Adding {} docs with commit=true, numDocs={}", docs, numDocs_c1);
       req.process(cluster1SolrClient);
     }
@@ -112,7 +116,7 @@ public class CdcrBidirectionalTest extends SolrTestCaseJ4 {
         req.add(doc);
       }
       req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
-      log.info("Adding " + docs + " docs with commit=true, numDocs=" + numDocs_c2);
+      log.info("Adding {} docs with commit=true, numDocs= {}", docs, numDocs_c2);
       req.process(cluster2SolrClient);
     }
@@ -125,9 +129,13 @@ public class CdcrBidirectionalTest extends SolrTestCaseJ4 {
       // logging cdcr clusters queue response
       response = CdcrTestsUtil.getCdcrQueue(cluster1SolrClient);
-      log.info("Cdcr cluster1 queue response: " + response.getResponse());
+      if (log.isInfoEnabled()) {
+        log.info("Cdcr cluster1 queue response: {}", response.getResponse());
+      }
       response = CdcrTestsUtil.getCdcrQueue(cluster2SolrClient);
-      log.info("Cdcr cluster2 queue response: " + response.getResponse());
+      if (log.isInfoEnabled()) {
+        log.info("Cdcr cluster2 queue response: {}", response.getResponse());
+      }
       // lets find and keep the maximum version assigned by cluster1 & cluster2 across all our updates
@@ -150,8 +158,8 @@ public class CdcrBidirectionalTest extends SolrTestCaseJ4 {
       Long checkpoint_1 = (Long) response.getResponse().get(CdcrParams.CHECKPOINT);
       assertNotNull(checkpoint_1);
-      log.info("v1: " + maxVersion_c1 + "\t" + "v2: " + maxVersion_c2 + "\t" +
-          "checkpoint1: " + checkpoint_1 + "\t" + "checkpoint2: " + checkpoint_2);
+      log.info("v1: {}\tv2: {}\tcheckpoint1: {}\tcheckpoint2: {}"
+          , maxVersion_c1, maxVersion_c2, checkpoint_1, checkpoint_2);
       assertEquals("COLLECTIONCHECKPOINT from cluster2 should have returned the maximum " +
           "version across all updates made to cluster1", maxVersion_c1, checkpoint_2.longValue());
@@ -203,9 +211,13 @@ public class CdcrBidirectionalTest extends SolrTestCaseJ4 {
       // logging cdcr clusters queue response
       response = CdcrTestsUtil.getCdcrQueue(cluster1SolrClient);
-      log.info("Cdcr cluster1 queue response at end of testcase: " + response.getResponse());
+      if (log.isInfoEnabled()) {
+        log.info("Cdcr cluster1 queue response at end of testcase: {}", response.getResponse());
+      }
       response = CdcrTestsUtil.getCdcrQueue(cluster2SolrClient);
-      log.info("Cdcr cluster2 queue response at end of testcase: " + response.getResponse());
+      if (log.isInfoEnabled()) {
+        log.info("Cdcr cluster2 queue response at end of testcase: {}", response.getResponse());
+      }
       CdcrTestsUtil.cdcrStop(cluster1SolrClient);
       CdcrTestsUtil.cdcrStop(cluster2SolrClient);
diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java
index 70c9f26c4df..af4b0a618ce 100644
--- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrBootstrapTest.java
@@ -63,7 +63,9 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
     // start the target first so that we know its zkhost
     MiniSolrCloudCluster target = new MiniSolrCloudCluster(1, createTempDir("cdcr-target"), buildJettyConfig("/solr"));
     try {
-      log.info("Target zkHost = " + target.getZkServer().getZkAddress());
+      if (log.isInfoEnabled()) {
+        log.info("Target zkHost = {}", target.getZkServer().getZkAddress());
+      }
       System.setProperty("cdcr.target.zkHost", target.getZkServer().getZkAddress());
       // start a cluster with no cdcr
@@ -120,7 +122,9 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
         CdcrTestsUtil.cdcrStart(sourceSolrClient);
         response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient);
-        log.info("Cdcr queue response: " + response.getResponse());
+        if (log.isInfoEnabled()) {
+          log.info("Cdcr queue response: {}", response.getResponse());
+        }
         long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, targetSolrClient);
         assertEquals("Document mismatch on target after sync", numDocs, foundDocs);
         assertTrue(CdcrTestsUtil.assertShardInSync("cdcr-target", "shard1", targetSolrClient)); // with more than 1 replica
@@ -155,7 +159,7 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
       req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
       req.process(sourceSolrClient);
     }
-    log.info("Adding numDocs=" + numDocs);
+    log.info("Adding numDocs={}", numDocs);
     return numDocs;
   }
   /**
@@ -197,7 +201,9 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
         CdcrTestsUtil.cdcrStart(sourceSolrClient);
         response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient);
-        log.info("Cdcr queue response: " + response.getResponse());
+        if (log.isInfoEnabled()) {
+          log.info("Cdcr queue response: {}", response.getResponse());
+        }
         long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, targetSolrClient);
         assertEquals("Document mismatch on target after sync", numDocs, foundDocs);
@@ -220,7 +226,7 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
         req.add(doc);
       }
       req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
-      log.info("Adding 100 docs with commit=true, numDocs=" + numDocs);
+      log.info("Adding 100 docs with commit=true, numDocs={}", numDocs);
       req.process(sourceSolrClient);
     }
@@ -280,7 +286,9 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
         CdcrTestsUtil.cdcrStart(sourceSolrClient);
         response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient);
-        log.info("Cdcr queue response: " + response.getResponse());
+        if (log.isInfoEnabled()) {
+          log.info("Cdcr queue response: {}", response.getResponse());
+        }
         long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, targetSolrClient);
         assertEquals("Document mismatch on target after sync", numDocs, foundDocs);
         assertTrue("leader followers didnt' match", CdcrTestsUtil.assertShardInSync("cdcr-target", "shard1", targetSolrClient)); // with more than 1 replica
@@ -301,7 +309,9 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
     // start the target first so that we know its zkhost
     MiniSolrCloudCluster target = new MiniSolrCloudCluster(1, createTempDir("cdcr-target"), buildJettyConfig("/solr"));
     try {
-      log.info("Target zkHost = " + target.getZkServer().getZkAddress());
+      if (log.isInfoEnabled()) {
+        log.info("Target zkHost = {}", target.getZkServer().getZkAddress());
+      }
       System.setProperty("cdcr.target.zkHost", target.getZkServer().getZkAddress());
       MiniSolrCloudCluster source = new MiniSolrCloudCluster(1, createTempDir("cdcr-source"), buildJettyConfig("/solr"));
@@ -340,7 +350,7 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
         req.add(doc);
       }
       req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
-      log.info("Adding " + docs + " docs with commit=true, numDocs=" + numDocs);
+      log.info("Adding {} docs with commit=true, numDocs={}", docs, numDocs);
       req.process(sourceSolrClient);
     }
@@ -348,7 +358,9 @@ public class CdcrBootstrapTest extends SolrTestCaseJ4 {
     assertEquals("", numDocs, response.getResults().getNumFound());
     response = CdcrTestsUtil.getCdcrQueue(sourceSolrClient);
-    log.info("Cdcr queue response: " + response.getResponse());
+    if (log.isInfoEnabled()) {
+      log.info("Cdcr queue response: {}", response.getResponse());
+    }
     long foundDocs = CdcrTestsUtil.waitForClusterToSync(numDocs, targetSolrClient);
     assertEquals("Document mismatch on target after sync", numDocs, foundDocs);
diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java
index 264b62465e2..7bd371f6bfc 100644
--- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrReplicationHandlerTest.java
@@ -276,7 +276,9 @@ public class CdcrReplicationHandlerTest extends BaseCdcrDistributedZkTest {
           }
           index(SOURCE_COLLECTION, docs);
          numDocs.getAndAdd(10);
-          log.info("Sent batch of {} updates - numDocs:{}", docs.size(), numDocs);
+          if (log.isInfoEnabled()) {
+            log.info("Sent batch of {} updates - numDocs:{}", docs.size(), numDocs);
+          }
        }
       } catch (Exception e) {
         throw new RuntimeException(e);
diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java
index 5ea1aa9be07..869e5be32d4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java
+++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrTestsUtil.java
@@ -105,7 +105,7 @@ public class CdcrTestsUtil extends SolrTestCaseJ4 {
       }
       Thread.sleep(200);
     }
-    log.error("maxVersionEncountered not found for client : " + client + "in 20 attempts");
+    log.error("maxVersionEncountered not found for client : {} in 20 attempts", client);
     return null;
   }
@@ -236,7 +236,9 @@ public class CdcrTestsUtil extends SolrTestCaseJ4 {
     if (!file.isDirectory()) {
       assertTrue("Path to tlog " + dir + " does not exists or it's not a directory.", false);
     }
-    log.debug("Update log dir {} contains: {}", dir, file.listFiles());
+    if (log.isDebugEnabled()) {
+      log.debug("Update log dir {} contains: {}", dir, file.listFiles());
+    }
     return file.listFiles().length;
   }
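In the CDCR hunks above, the same expensive value (getZkServer().getZkAddress() and friends) often feeds more than one statement. The guard pattern also allows hoisting the value into a local computed once inside the check, so nothing at all is built when INFO is off. A sketch under that assumption, with the property lookup standing in for the real ZooKeeper call:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HoistSketch {
  private static final Logger log = LoggerFactory.getLogger(HoistSketch.class);

  public static void main(String[] args) {
    if (log.isInfoEnabled()) {
      // Computed once and reused; skipped entirely when INFO is disabled.
      String zkAddress = System.getProperty("zkHost", "localhost:2181");
      log.info("Target zkHost = {}", zkAddress);
      log.info("Source cluster will connect to {}", zkAddress);
    }
  }
}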
diff --git a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java
index ff9afe2c6c4..6953a324018 100644
--- a/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/cdcr/CdcrVersionReplicationTest.java
@@ -82,7 +82,7 @@ public class CdcrVersionReplicationTest extends BaseCdcrDistributedZkTest {
   private void doTestCdcrDocVersions(SolrClient solrClient) throws Exception {
     this.solrServer = solrClient;
-    log.info("### STARTING doCdcrTestDocVersions - Add commands, client: " + solrClient);
+    log.info("### STARTING doCdcrTestDocVersions - Add commands, client: {}", solrClient);
     vadd("doc1", 10, CdcrUpdateProcessor.CDCR_UPDATE, "", vfield, "10");
     vadd("doc2", 11, CdcrUpdateProcessor.CDCR_UPDATE, "", vfield, "11");
diff --git a/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java b/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java
index 1156aad6725..314af7c59e2 100644
--- a/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java
+++ b/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java
@@ -77,7 +77,9 @@ public class SolrCoreCheckLockOnStartupTest extends SolrTestCaseJ4 {
   public void testNativeLockErrorOnStartup() throws Exception {
     File indexDir = new File(initAndGetDataDir(), "index");
-    log.info("Acquiring lock on {}", indexDir.getAbsolutePath());
+    if (log.isInfoEnabled()) {
+      log.info("Acquiring lock on {}", indexDir.getAbsolutePath());
+    }
     Directory directory = newFSDirectory(indexDir.toPath(), NativeFSLockFactory.INSTANCE);
     //creates a new IndexWriter without releasing the lock yet
     IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(null));
diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java
index 90ce4f948a0..663febb18a2 100644
--- a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java
+++ b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java
@@ -156,7 +156,9 @@ public class TestJmxIntegration extends SolrTestCaseJ4 {
     if (bean==null) throw new RuntimeException("searcher was never registered");
     ObjectName searcher = nameFactory.createName("gauge", registryName, "SEARCHER.searcher.*");
-    log.info("Mbeans in server: " + mbeanServer.queryNames(null, null));
+    if (log.isInfoEnabled()) {
+      log.info("Mbeans in server: {}", mbeanServer.queryNames(null, null));
+    }
     Set objects = mbeanServer.queryMBeans(searcher, null);
     assertFalse("No mbean found for SolrIndexSearcher", mbeanServer.queryMBeans(searcher, null).isEmpty());
@@ -204,7 +206,7 @@ public class TestJmxIntegration extends SolrTestCaseJ4 {
     }
     int totalCoreMetrics = mgr.registry(registryName).getMetrics().size();
-    log.info("Before Reload: size of all core metrics: " + totalCoreMetrics + " MBeans: " + oldNumberOfObjects);
+    log.info("Before Reload: size of all core metrics: {} MBeans: {}", totalCoreMetrics, oldNumberOfObjects);
     assertEquals("Number of registered MBeans is not the same as the number of core metrics", totalCoreMetrics, oldNumberOfObjects);
     h.getCoreContainer().reload(coreName);
     assertQ(req("q", "*:*"), "//result[@numFound='0']");
@@ -250,14 +252,14 @@ public class TestJmxIntegration extends SolrTestCaseJ4 {
       }
     }
-    log.info("After Reload: size of all core metrics: " + totalCoreMetrics + " MBeans: " + newNumberOfObjects);
+    log.info("After Reload: size of all core metrics: {} MBeans: {}", totalCoreMetrics, newNumberOfObjects);
     if (totalCoreMetrics != newNumberOfObjects) {
       Set errors = new TreeSet<>(beanNames);
       errors.removeAll(metricNames);
-      log.error("Unexpected bean names: " + errors);
+      log.error("Unexpected bean names: {}", errors);
       errors = new TreeSet<>(metricNames);
       errors.removeAll(beanNames);
-      log.error("Unexpected metric names: " + errors);
+      log.error("Unexpected metric names: {}", errors);
       fail("Number of registered MBeans is not the same as the number of core metrics: " + totalCoreMetrics + " != " + newNumberOfObjects);
     }
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java
index f95d14e7dee..499dccd1710 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java
@@ -64,7 +64,9 @@ public class TestConfigReload extends AbstractFullDistribZkTestBase {
   private void reloadTest() throws Exception {
     SolrZkClient client = cloudClient.getZkStateReader().getZkClient();
-    log.info("live_nodes_count : " + cloudClient.getZkStateReader().getClusterState().getLiveNodes());
+    if (log.isInfoEnabled()) {
+      log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes());
+    }
     String confPath = ZkConfigManager.CONFIGS_ZKNODE+"/conf1/";
     // checkConfReload(client, confPath + ConfigOverlay.RESOURCE_NAME, "overlay");
     checkConfReload(client, confPath + SolrConfig.DEFAULT_CONF_FILE,"config", "/config");
@@ -85,7 +87,9 @@ public class TestConfigReload extends AbstractFullDistribZkTestBase {
     Stat newStat = client.setData(resPath, data, true);
     client.setData("/configs/conf1", new byte[]{1}, true);
     assertTrue(newStat.getVersion() > stat.getVersion());
-    log.info("new_version "+ newStat.getVersion());
+    if (log.isInfoEnabled()) {
+      log.info("new_version {}", newStat.getVersion());
+    }
     Integer newVersion = newStat.getVersion();
     long maxTimeoutSeconds = 60;
     DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1");
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
index 8cc19d05523..38512652fb3 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
@@ -221,12 +221,14 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
     NamedList res = query(query, client);
     while (expectedDocCount != numFound(res) && timeSlept < 30000) {
-      log.info("Waiting for " + expectedDocCount + " docs");
+      log.info("Waiting for {} docs", expectedDocCount);
       timeSlept += 100;
       Thread.sleep(100);
       res = query(query, client);
     }
-    log.info("Waited for {}ms and found {} docs", timeSlept, numFound(res));
+    if (log.isInfoEnabled()) {
+      log.info("Waited for {}ms and found {} docs", timeSlept, numFound(res));
+    }
     return res;
   }
@@ -705,7 +707,9 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
         assertEquals(1, Integer.parseInt(getStringOrNull(slaveDetails,"timesIndexReplicated")) - failed);
         break;
       } catch (NumberFormatException | AssertionError notYet) {
-        log.info((retries+1)+"th attempt failure on " + notYet+" details are "+slaveDetails);
+        if (log.isInfoEnabled()) {
+          log.info("{}th attempt failure on {} details are {}", retries + 1, notYet, slaveDetails); // logOk
+        }
         if (retries>9) {
           log.error("giving up: ", notYet);
           throw notYet;
@@ -1507,7 +1511,8 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
     long timeTakenInSeconds = TimeUnit.SECONDS.convert(timeTaken, TimeUnit.NANOSECONDS);
     //Let's make sure it took more than approximateTimeInSeconds to make sure that it was throttled
-    log.info("approximateTimeInSeconds = " + approximateTimeInSeconds + " timeTakenInSeconds = " + timeTakenInSeconds);
+    log.info("approximateTimeInSeconds = {} timeTakenInSeconds = {}"
+        , approximateTimeInSeconds, timeTakenInSeconds);
     assertTrue(timeTakenInSeconds - approximateTimeInSeconds > 0);
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java
index 9b997d50e60..08fc7df26c7 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java
@@ -205,7 +205,9 @@ public class TestReplicationHandlerDiskOverFlow extends SolrTestCaseJ4 {
             .add("qt", "/replication")
             .add("command", ReplicationHandler.CMD_DETAILS)
     );
-    log.info("DETAILS" + Utils.writeJson(response, new StringWriter(), true).toString());
+    if (log.isInfoEnabled()) {
+      log.info("DETAILS {}", Utils.writeJson(response, new StringWriter(), true).toString());
+    }
     assertEquals("slave's clearedLocalIndexFirst (from rep details)", "true", response._getStr("details/slave/clearedLocalIndexFirst", null));
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
index 17d42bf90f0..24d0fff92f1 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
@@ -88,7 +88,7 @@ public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBa
     for (List e : collectErrors) {
       if(!e.isEmpty()){
         success = false;
-        log.error(e.toString());
+        log.error("{}", e);
       }
     }
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java
index 935d2cd306a..750dc72c368 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/AutoscalingHistoryHandlerTest.java
@@ -276,7 +276,7 @@ public class AutoscalingHistoryHandlerTest extends SolrCloudTestCase {
     JettySolrRunner jetty = cluster.startJettySolrRunner();
     cluster.waitForAllNodes(30);
     String nodeAddedName = jetty.getNodeName();
-    log.info("### Added node " + nodeAddedName);
+    log.info("### Added node {}", nodeAddedName);
     boolean await = actionFiredLatch.await(60, TimeUnit.SECONDS);
     assertTrue("action did not execute", await);
@@ -366,7 +366,7 @@ public class AutoscalingHistoryHandlerTest extends SolrCloudTestCase {
       }
     }
     assertNotNull("no suitable node found", nodeToKill);
-    log.info("### Stopping node " + nodeToKill);
+    log.info("### Stopping node {}", nodeToKill);
     for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
       if (cluster.getJettySolrRunner(i).getNodeName().equals(nodeToKill)) {
         JettySolrRunner j = cluster.stopJettySolrRunner(i);
@@ -374,7 +374,7 @@ public class AutoscalingHistoryHandlerTest extends SolrCloudTestCase {
         break;
       }
     }
-    log.info("### Stopped node " + nodeToKill);
+    log.info("### Stopped node {}", nodeToKill);
     await = actionFiredLatch.await(60, TimeUnit.SECONDS);
     assertTrue("action did not execute", await);
@@ -405,17 +405,19 @@ public class AutoscalingHistoryHandlerTest extends SolrCloudTestCase {
       QueryResponse rsp = client.query(query);
       SolrDocumentList docs = rsp.getResults();
       if (docs.size() != expected) {
-        log.info("History query: " + query);
-        log.info("Wrong response: " + rsp);
+        log.info("History query: {}", query);
+        log.info("Wrong response: {}", rsp);
         ModifiableSolrParams fullQuery = params(CommonParams.QT, CommonParams.AUTOSCALING_HISTORY_PATH);
-        log.info("Full response: " + client.query(fullQuery));
+        if (log.isInfoEnabled()) {
+          log.info("Full response: {}", client.query(fullQuery));
+        }
       }
       assertEquals("Wrong number of documents", expected, docs.size());
       return docs;
   }
   private static void waitForRecovery(String collection) throws Exception {
-    log.info("Waiting for recovery of " + collection);
+    log.info("Waiting for recovery of {}", collection);
     boolean recovered = false;
     boolean allActive = true;
     boolean hasLeaders = true;
@@ -423,7 +425,7 @@ public class AutoscalingHistoryHandlerTest extends SolrCloudTestCase {
     for (int i = 0; i < 300; i++) {
       ClusterState state = solrClient.getZkStateReader().getClusterState();
       collState = getCollectionState(collection);
-      log.debug("###### " + collState);
+      log.debug("###### {}", collState);
       Collection replicas = collState.getReplicas();
       allActive = true;
       hasLeaders = true;
@@ -431,11 +433,11 @@ public class AutoscalingHistoryHandlerTest extends SolrCloudTestCase {
         for (Replica r : replicas) {
           if (state.getLiveNodes().contains(r.getNodeName())) {
             if (!r.isActive(state.getLiveNodes())) {
-              log.info("Not active: " + r);
+              log.info("Not active: {}", r);
               allActive = false;
             }
           } else {
-            log.info("Replica no longer on a live node, ignoring: " + r);
+            log.info("Replica no longer on a live node, ignoring: {}", r);
           }
         }
       } else {
@@ -450,7 +452,7 @@ public class AutoscalingHistoryHandlerTest extends SolrCloudTestCase {
       recovered = true;
       break;
     } else {
-      log.info("--- waiting, allActive=" + allActive + ", hasLeaders=" + hasLeaders);
+      log.info("--- waiting, allActive={}, hasLeaders={}", allActive, hasLeaders);
       Thread.sleep(1000);
     }
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java
index ad0816b6193..a90527b1e57 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java
@@ -702,7 +702,9 @@ public class QueryElevationComponentTest extends SolrTestCaseJ4 {
       out.flush();
       out.close();
-      log.info("OUT:" + file.getAbsolutePath());
+      if (log.isInfoEnabled()) {
+        log.info("OUT: {}", file.getAbsolutePath());
+      }
   }
   @Test
diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
index d268a4e424d..c36a8ad714e 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
@@ -119,7 +119,9 @@ public class ResourceSharingTestComponent extends SearchComponent implements Sol
     assertEquals("bar", dict.get("foo"));
     assertEquals("bam", dict.get("baz"));
-    log.info("Loaded {} using {}", getDict().size(), this.getClass().getClassLoader());
+    if (log.isInfoEnabled()) {
+      log.info("Loaded {} using {}", getDict().size(), this.getClass().getClassLoader());
+    }
     // if we get here we have seen the data from the blob and all we need is to test that two collections
     // are able to see the same object..
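A recurring fix in this stretch replaces log.error(obj.toString()) with log.error("{}", obj): handing the logger the object itself defers the toString() call until the event is actually written. A minimal sketch of the two forms (names invented; most useful at DEBUG/INFO levels that are commonly disabled):

import java.util.Arrays;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DeferredToStringSketch {
  private static final Logger log = LoggerFactory.getLogger(DeferredToStringSketch.class);

  public static void main(String[] args) {
    List<String> errors = Arrays.asList("bad header", "bad units");
    // Builds the String eagerly at the call site, whatever the level.
    log.debug(errors.toString());
    // toString() runs only if the event is actually emitted.
    log.debug("{}", errors);
  }
}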
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java
index a1d3f673566..9bde0fa80fe 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java
@@ -62,7 +62,9 @@ public abstract class TaggerTestCase extends SolrTestCaseJ4 {
   public TestWatcher watchman = new TestWatcher() {
     @Override
     protected void starting(Description description) {
-      log.info("{} being run...", description.getDisplayName());
+      if (log.isInfoEnabled()) {
+        log.info("{} being run...", description.getDisplayName());
+      }
     }
   };
@@ -175,7 +177,7 @@ public abstract class TaggerTestCase extends SolrTestCaseJ4 {
   /** REMEMBER to close() the result req object. */
   protected SolrQueryRequest reqDoc(String doc, SolrParams moreParams) {
-    log.debug("Test doc: "+doc);
+    log.debug("Test doc: {}", doc);
     SolrParams params = SolrParams.wrapDefaults(moreParams, baseParams);
     SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), params) {};
     Iterable stream = Collections.singleton((ContentStream)new ContentStreamBase.StringStream(doc));
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java
index 3da4b1446b6..94205b2eb75 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java
@@ -99,7 +99,9 @@ public class SolrJmxReporterCloudTest extends SolrCloudTestCase {
       QueryExp exp = Query.eq(Query.attr(JmxMetricsReporter.INSTANCE_TAG), Query.value(Integer.toHexString(v.hashCode())));
       Set beans = mBeanServer.queryMBeans(null, exp);
       if (((SolrJmxReporter) v).isStarted() && beans.isEmpty() && jmxReporters < 2) {
-        log.info("DocCollection: " + getCollectionState(COLLECTION));
+        if (log.isInfoEnabled()) {
+          log.info("DocCollection: {}", getCollectionState(COLLECTION));
+        }
         fail("JMX reporter " + k + " for registry " + registry + " failed to register any beans!");
       } else {
         Set categories = new HashSet<>();
@@ -109,7 +111,7 @@ public class SolrJmxReporterCloudTest extends SolrCloudTestCase {
             categories.add(cat);
           }
         });
-        log.info("Registered categories: " + categories);
+        log.info("Registered categories: {}", categories);
         assertTrue("Too few categories: " + categories, categories.size() > 5);
       }
     });
diff --git a/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java b/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java
index e17c821b75c..70575752a8b 100644
--- a/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java
+++ b/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java
@@ -3561,7 +3561,7 @@ public class SimpleFacetsTest extends SolrTestCaseJ4 {
     NamedList rangeFacetsDv;
     SolrQueryRequest req = req(params);
-    log.info("Using Params: " + params);
+    log.info("Using Params: {}", params);
     try {
       SolrQueryResponse rsp = h.queryAndResponse("", req);
       rangeFacetsFilter = (NamedList) ((NamedList) rsp.getValues().get("facet_counts")).get("facet_ranges");
diff --git a/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java b/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java
index 201f9cb6352..d484b353cf2 100644
--- a/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java
+++ b/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java
@@ -149,7 +149,10 @@ public class TestWriterPerf extends SolrTestCaseJ4 {
     double decodeTime = timer.getTime();
-    log.info("writer "+writerName+", size="+out.size()+", encodeRate="+(encIter*1000L/encodeTime) + ", decodeRate="+(decIter*1000L/decodeTime));
+    if (log.isInfoEnabled()) {
+      log.info("writer {}, size={}, encodeRate={} decodeRate={}"
+          , writerName, out.size(), (encIter * 1000L / encodeTime), (decIter * 1000L / decodeTime));
+    }
     req.close();
   }
diff --git a/solr/core/src/test/org/apache/solr/response/TestPushWriter.java b/solr/core/src/test/org/apache/solr/response/TestPushWriter.java
index 7f1eddb81ce..4f6f23b4b42 100644
--- a/solr/core/src/test/org/apache/solr/response/TestPushWriter.java
+++ b/solr/core/src/test/org/apache/solr/response/TestPushWriter.java
@@ -50,7 +50,9 @@ public class TestPushWriter extends SolrTestCaseJ4 {
         new LocalSolrQueryRequest(null, new ModifiableSolrParams()), new SolrQueryResponse());
     writeData(pw);
     osw.flush();
-    log.info(new String(baos.toByteArray(), StandardCharsets.UTF_8));
+    if (log.isInfoEnabled()) {
+      log.info("{}", new String(baos.toByteArray(), StandardCharsets.UTF_8));
+    }
     Map m = (Map) Utils.fromJSON(baos.toByteArray());
     checkValues(m);
     try (JavaBinCodec jbc = new JavaBinCodec(baos= new ByteArrayOutputStream(), null)) {
diff --git a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java
index dbff7e208e9..24997e72220 100644
--- a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java
@@ -141,7 +141,8 @@ public class ChangedSchemaMergeTest extends SolrTestCaseJ4 {
       changed.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
       changed.getUpdateHandler().commit(new CommitUpdateCommand(req, true));
     } catch (Throwable e) {
-      log.error("Test exception, logging so not swallowed if there is a (finally) shutdown exception: " + e.getMessage(), e);
+      log.error("Test exception, logging so not swallowed if there is a (finally) shutdown exception: {}"
+          , e.getMessage(), e);
       throw e;
     } finally {
       if (cc != null) cc.shutdown();
diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
index dcda834dac8..73178c3c2a5 100644
--- a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
@@ -570,7 +570,7 @@ public class DocValuesTest extends SolrTestCaseJ4 {
     }
     assertU(commit());
-    log.info("Indexed values: "+values);
+    log.info("Indexed values: {}", values);
     // Querying
     int numQueries = 10000;
     for (int j=0; j ps = ((MapSolrParams)req.getParams()).getMap();
     final String q = ps.get("q");
     ps.put("q", q.replaceAll("join score=none", "join"));
-    log.error("plain join: "+h.query(req));
+    log.error("plain join: {}", h.query(req));
     ps.put("q", q);
   }
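The TestWriterPerf hunk above guards a message whose arguments are arithmetic, not method calls. The divisions and the autoboxing of their results also happen before info() is entered, so the guard saves that work too when the level is off. A sketch with invented values:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ComputedArgsSketch {
  private static final Logger log = LoggerFactory.getLogger(ComputedArgsSketch.class);

  public static void main(String[] args) {
    long encIter = 10_000, decIter = 10_000;
    double encodeTime = 12.5, decodeTime = 9.75;
    // The divisions run and their double results are boxed before the call,
    // so the whole statement sits behind the level check.
    if (log.isInfoEnabled()) {
      log.info("encodeRate={} decodeRate={}",
          encIter * 1000L / encodeTime, decIter * 1000L / decodeTime);
    }
  }
}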
+        if (log.isInfoEnabled()) {
+          log.info("Listening for audit callbacks on port {}", serverSocket.getLocalPort());
+        }
         Socket socket = serverSocket.accept();
         BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
         while (!Thread.currentThread().isInterrupted()) {
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 09e2c0a2f09..9629a62c377 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@ -262,8 +262,8 @@ public class BasicAuthIntegrationTest extends SolrCloudAuthTestCase {
       assertTrue(obj.containsKey("uptime"));
       assertTrue(obj.containsKey("memory"));
     } catch (Exception e) {
-      log.error("RunExampleTool failed due to: " + e +
-          "; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name()));
+      log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}"
+          , e, baos.toString(StandardCharsets.UTF_8.name())); // logOk
     }
 
     SolrParams params = new MapSolrParams(Collections.singletonMap("q", "*:*"));
diff --git a/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java b/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java
index c61a4c61128..cc6ad98c0e6 100644
--- a/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java
@@ -60,7 +60,9 @@ public class CallbackAuditLoggerPlugin extends AuditLoggerPlugin {
     if (! out.checkError()) {
       log.error("Output stream has an ERROR!");
     }
-    log.info("Sent audit callback {} to localhost:{}", formatter.formatEvent(event), callbackPort);
+    if (log.isInfoEnabled()) {
+      log.info("Sent audit callback {} to localhost:{}", formatter.formatEvent(event), callbackPort);
+    }
   }
 
   @Override
diff --git a/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java b/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java
index f1c7abb05cc..db995a9d733 100644
--- a/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java
@@ -39,7 +39,9 @@ public class MockAuditLoggerPlugin extends AuditLoggerPlugin {
   public void audit(AuditEvent event) {
     events.add(event);
     incrementType(event.getEventType().name());
-    log.info("#{} - {}", events.size(), typeCounts);
+    if (log.isInfoEnabled()) {
+      log.info("#{} - {}", events.size(), typeCounts);
+    }
   }
 
   private void incrementType(String type) {
diff --git a/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java
index 1062f60fcfb..d58a4993f29 100644
--- a/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java
@@ -48,7 +48,7 @@ public class MockAuthorizationPlugin implements AuthorizationPlugin {
       return new AuthorizationResponse(200);
     }
     if (uname == null) uname = context.getParams().get("uname");
-    log.info("User request: " + uname);
+    log.info("User request: {}", uname);
     if (uname == null || denyUsers.contains(uname))
       return new AuthorizationResponse(403);
     else
diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java b/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java
index 6e3172d050f..f692d42b6e2 100644
--- a/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java
+++ b/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java
@@ -99,15 +99,15 @@ public class RandomTestDictionaryFactory extends DictionaryFactory {
           ++emittedItems;
           next = new BytesRef(TestUtil.randomUnicodeString(LuceneTestCase.random(), MAX_LENGTH));
           if (emittedItems % 1000 == 0) {
-            log.info(enabledSysProp + " emitted " + emittedItems + " items.");
+            log.info("{} emitted {} items", enabledSysProp, emittedItems);
          }
        } else {
-          log.info(enabledSysProp + " disabled after emitting " + emittedItems + " items.");
+          log.info("{} disabled after emitting {} items", enabledSysProp, emittedItems);
          System.clearProperty(enabledSysProp); // disable once maxItems has been reached
          emittedItems = 0L;
        }
      } else {
-      log.warn(enabledSysProp + " invoked when disabled");
+      log.warn("{} invoked when disabled", enabledSysProp);
       emittedItems = 0L;
     }
     return next;
diff --git a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
index 76d9acee670..f3d32ba5525 100644
--- a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java
@@ -386,15 +386,21 @@ public class DirectUpdateHandlerTest extends SolrTestCaseJ4 {
     DirectoryReader r = sr.getSearcher().getIndexReader();
     Directory d = r.directory();
 
-    log.info("FILES before addDoc="+ Arrays.asList(d.listAll()));
+    if (log.isInfoEnabled()) {
+      log.info("FILES before addDoc={}", Arrays.asList(d.listAll()));
+    }
     assertU(adoc("id", "1"));
 
     int nFiles = d.listAll().length;
-    log.info("FILES before prepareCommit="+ Arrays.asList(d.listAll()));
+    if (log.isInfoEnabled()) {
+      log.info("FILES before prepareCommit={}", Arrays.asList(d.listAll()));
+    }
 
     updateJ("", params("prepareCommit", "true"));
 
-    log.info("FILES after prepareCommit="+Arrays.asList(d.listAll()));
+    if (log.isInfoEnabled()) {
+      log.info("FILES after prepareCommit={}", Arrays.asList(d.listAll()));
+    }
     assertTrue( d.listAll().length > nFiles);  // make sure new index files were actually written
 
     assertJQ(req("q", "id:1")
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index 418aead206d..3831100e885 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -290,7 +290,9 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
 
     // order the updates correctly for NONLEADER 1
     for (UpdateRequest update : updates) {
-      log.info("Issuing well ordered update: " + update.getDocuments());
+      if (log.isInfoEnabled()) {
+        log.info("Issuing well ordered update: {}", update.getDocuments());
+      }
       NONLEADERS.get(1).request(update);
     }
 
@@ -383,7 +385,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
     // number of docs we're testing (0 <= id), index may contain additional random docs (id < 0)
     int numDocs = atLeast(100);
     if (onlyLeaderIndexes) numDocs = TestUtil.nextInt(random(), 10, 50);
-    log.info("Trying num docs = " + numDocs);
+    log.info("Trying num docs = {}", numDocs);
     final List<Integer> ids = new ArrayList<Integer>(numDocs);
     for (int id = 0; id < numDocs; id++) {
       ids.add(id);
@@ -400,7 +402,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
       luceneDocids.add((Integer) doc.get("[docid]"));
       valuesList.add((Float) doc.get("inplace_updatable_float"));
     }
-    log.info("Initial results: "+results);
+    log.info("Initial results: {}", results);
     // before we do any atomic operations, sanity check our results against all clients
     assertDocIdsAndValuesAgainstAllClients("sanitycheck", params, luceneDocids, "inplace_updatable_float", valuesList);
 
@@ -415,7 +417,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
       assert -5.0F <= value && value <= 5.0F;
       valuesList.set(id, value);
     }
-    log.info("inplace_updatable_float: " + valuesList);
+    log.info("inplace_updatable_float: {}", valuesList);
 
     // update doc w/ set
     Collections.shuffle(ids, r); // so updates aren't applied in index order
@@ -743,7 +745,9 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
 
     // order the updates correctly for NONLEADER 1
     for (UpdateRequest update : updates) {
-      log.info("Issuing well ordered update: " + update.getDocuments());
+      if (log.isInfoEnabled()) {
+        log.info("Issuing well ordered update: {}", update.getDocuments());
+      }
       NONLEADERS.get(1).request(update);
     }
 
@@ -774,7 +778,9 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
 
     // assert both replicas have same effect
     for (SolrClient client : NONLEADERS) { // 0th is re-ordered replica, 1st is well-ordered replica
-      log.info("Testing client: " + ((HttpSolrClient)client).getBaseURL());
+      if (log.isInfoEnabled()) {
+        log.info("Testing client: {}", ((HttpSolrClient) client).getBaseURL());
+      }
       assertReplicaValue(client, 0, "inplace_updatable_float", (newinplace_updatable_float + (float)(updates.size() - 1)),
          "inplace_updatable_float didn't match for replica at client: " + ((HttpSolrClient)client).getBaseURL());
       assertReplicaValue(client, 0, "title_s", "title0_new",
@@ -812,7 +818,9 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
 
     // order the updates correctly for NONLEADER 1
     for (UpdateRequest update : updates) {
-      log.info("Issuing well ordered update: " + update.getDocuments());
+      if (log.isInfoEnabled()) {
+        log.info("Issuing well ordered update: {}", update.getDocuments());
+      }
       NONLEADERS.get(1).request(update);
     }
 
@@ -884,7 +892,9 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
 
     // order the updates correctly for NONLEADER 1
     for (UpdateRequest update : updates) {
-      log.info("Issuing well ordered update: " + update.getDocuments());
+      if (log.isInfoEnabled()) {
+        log.info("Issuing well ordered update: {}", update.getDocuments());
+      }
       NONLEADERS.get(1).request(update);
     }
 
@@ -932,15 +942,17 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
     }
     // All should succeed, i.e. no LIR
     assertEquals(updateResponses.size(), successful);
-
-    log.info("Non leader 0: "+((HttpSolrClient)NONLEADERS.get(0)).getBaseURL());
-    log.info("Non leader 1: "+((HttpSolrClient)NONLEADERS.get(1)).getBaseURL());
+
+    if (log.isInfoEnabled()) {
+      log.info("Non leader 0: {}", ((HttpSolrClient) NONLEADERS.get(0)).getBaseURL());
+      log.info("Non leader 1: {}", ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); // logOk
+    }
 
     SolrDocument doc0 = NONLEADERS.get(0).getById(String.valueOf(0), params("distrib", "false"));
     SolrDocument doc1 = NONLEADERS.get(1).getById(String.valueOf(0), params("distrib", "false"));
 
-    log.info("Doc in both replica 0: "+doc0);
-    log.info("Doc in both replica 1: "+doc1);
+    log.info("Doc in both replica 0: {}", doc0);
+    log.info("Doc in both replica 1: {}", doc1);
     // assert both replicas have same effect
     for (int i=0; i
-    log.info("===> Started solr server port=" + port + " home="+getBaseDir());
+    if (log.isInfoEnabled()) {
+      log.info("===> Started solr server port={} home={}", port, getBaseDir());
+    }
   }
 
   public void stop() throws Exception {
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 704b4040746..b94aca327a6 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -517,8 +517,10 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
     }
 
     SSLTestConfig result = sslRandomizer.createSSLTestConfig();
-    log.info("Randomized ssl ({}) and clientAuth ({}) via: {}",
-        result.isSSLMode(), result.isClientAuthMode(), sslRandomizer.debug);
+    if (log.isInfoEnabled()) {
+      log.info("Randomized ssl ({}) and clientAuth ({}) via: {}",
+          result.isSSLMode(), result.isClientAuthMode(), sslRandomizer.debug);
+    }
     return result;
   }
 
@@ -593,12 +595,16 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    log.info("###Starting " + getTestName()); // returns ???
+    if (log.isInfoEnabled()) {
+      log.info("###Starting {}", getTestName()); // returns ???
+    }
   }
 
   @Override
   public void tearDown() throws Exception {
-    log.info("###Ending " + getTestName());
+    if (log.isInfoEnabled()) {
+      log.info("###Ending {}", getTestName());
+    }
     super.tearDown();
   }
 
@@ -625,7 +631,9 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
       final int id = dataDirCount.incrementAndGet();
       dataDir = initCoreDataDir = createTempDir("data-dir-"+ id).toFile();
       assertNotNull(dataDir);
-      log.info("Created dataDir: {}", dataDir.getAbsolutePath());
+      if (log.isInfoEnabled()) {
+        log.info("Created dataDir: {}", dataDir.getAbsolutePath());
+      }
     }
     return dataDir;
   }
@@ -671,7 +679,7 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
     numCloses = SolrIndexSearcher.numCloses.getAndSet(0);
     if (numOpens != 0 || numCloses != 0) {
       // NOTE: some other tests don't use this base class and hence won't reset the counts.
-      log.warn("startTrackingSearchers: numOpens="+numOpens+" numCloses="+numCloses);
+      log.warn("startTrackingSearchers: numOpens={} numCloses={}", numOpens, numCloses);
       numOpens = numCloses = 0;
     }
   }
@@ -857,7 +865,9 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
    * to log the fact that their setUp process has ended.
    */
   public void postSetUp() {
-    log.info("####POSTSETUP " + getTestName());
+    if (log.isInfoEnabled()) {
+      log.info("####POSTSETUP {}", getTestName());
+    }
   }
 
@@ -867,7 +877,9 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
    * tearDown method.
    */
   public void preTearDown() {
-    log.info("####PRETEARDOWN " + getTestName());
+    if (log.isInfoEnabled()) {
+      log.info("####PRETEARDOWN {}", getTestName());
+    }
   }
 
   /**
@@ -1012,7 +1024,7 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
       failed = false;
     } finally {
       if (failed) {
-        log.error("REQUEST FAILED: " + req.getParamString());
+        log.error("REQUEST FAILED: {}", req.getParamString());
       }
     }
 
@@ -1061,7 +1073,7 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
       failed = false;
     } finally {
       if (failed) {
-        log.error("REQUEST FAILED: " + req.getParamString());
+        log.error("REQUEST FAILED: {}", req.getParamString());
       }
     }
 
@@ -1074,19 +1086,15 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
       String err = JSONTestUtil.match(response, testJSON, delta);
       failed = false;
       if (err != null) {
-        log.error("query failed JSON validation. error=" + err +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + req.getParamString()
+        log.error("query failed JSON validation. error={}\n expected ={}\n response = {}\n request = {}"
+            , err, testJSON, response, req.getParamString()
         );
         throw new RuntimeException(err);
       }
     } finally {
       if (failed) {
-        log.error("JSON query validation threw an exception." +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + req.getParamString()
+        log.error("JSON query validation threw an exception.\n expected ={} \n response = {}\n request = {}"
+            , testJSON, response, req.getParamString()
         );
       }
     }
@@ -2956,7 +2964,9 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
     } else {
       System.setProperty(UPDATELOG_SYSPROP,"solr.UpdateLog");
     }
-    log.info("updateLog impl={}", System.getProperty(UPDATELOG_SYSPROP));
+    if (log.isInfoEnabled()) {
+      log.info("updateLog impl={}", System.getProperty(UPDATELOG_SYSPROP));
+    }
   }
 
   /**
@@ -2982,7 +2992,7 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
     if (RandomizedContext.current().getTargetClass().isAnnotationPresent(SolrTestCaseJ4.SuppressPointFields.class) || (! usePoints)) {
-      log.info("Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP="+useDV);
+      log.info("Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP={}", useDV);
 
       org.apache.solr.schema.PointField.TEST_HACK_IGNORE_USELESS_TRIEFIELD_ARGS = false;
       private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Integer.class, "solr.TrieIntField");
@@ -2994,7 +3004,7 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase {
       System.setProperty(NUMERIC_POINTS_SYSPROP, "false");
     } else {
-      log.info("Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP="+useDV);
+      log.info("Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP={}", useDV);
 
       org.apache.solr.schema.PointField.TEST_HACK_IGNORE_USELESS_TRIEFIELD_ARGS = true;
       private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Integer.class, "solr.IntPointField");
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
index a58c39bc826..5bf7bbbfaf2 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
@@ -204,7 +204,8 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
   public static void waitForCollectionToDisappear(String collection, ZkStateReader zkStateReader, boolean failOnTimeout, int timeoutSeconds)
       throws Exception {
-    log.info("Wait for collection to disappear - collection: " + collection + " failOnTimeout:" + failOnTimeout + " timeout (sec):" + timeoutSeconds);
+    log.info("Wait for collection to disappear - collection: {} failOnTimeout:{} timeout (sec):{}"
+        , collection, failOnTimeout, timeoutSeconds);
     zkStateReader.waitForState(collection, timeoutSeconds, TimeUnit.SECONDS, (docCollection) -> docCollection == null);
 
     log.info("Collection has disappeared - collection:{}", collection);
@@ -221,7 +222,10 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
       DocCollection coll = clusterState.getCollection("collection1");
       Slice slice = coll.getSlice(shardName);
       if (slice.getLeader() != null && !slice.getLeader().equals(oldLeader) && slice.getLeader().getState() == Replica.State.ACTIVE) {
-        log.info("Old leader {}, new leader {}. New leader got elected in {} ms", oldLeader, slice.getLeader(),timeOut.timeElapsed(MILLISECONDS) );
+        if (log.isInfoEnabled()) {
+          log.info("Old leader {}, new leader {}. New leader got elected in {} ms"
+              , oldLeader, slice.getLeader(), timeOut.timeElapsed(MILLISECONDS));
+        }
         break;
       }
 
@@ -240,7 +244,10 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes
       Slice slice = docCollection.getSlice(shardName);
       if (slice != null && slice.getLeader() != null && !slice.getLeader().equals(oldLeader) && slice.getLeader().getState() == Replica.State.ACTIVE) {
-        log.info("Old leader {}, new leader {}. New leader got elected in {} ms", oldLeader, slice.getLeader(), timeOut.timeElapsed(MILLISECONDS) );
+        if (log.isInfoEnabled()) {
+          log.info("Old leader {}, new leader {}. New leader got elected in {} ms"
+              , oldLeader, slice.getLeader(), timeOut.timeElapsed(MILLISECONDS));
+        }
         return true;
       }
       return false;
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index 1361af93ebf..cadf36a3205 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -419,8 +419,11 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     ExecutorService customThreadPool = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("closeThreadPool"));
 
     int numOtherReplicas = numJettys - getPullReplicaCount() * sliceCount;
-
-    log.info("Creating jetty instances pullReplicaCount={} numOtherReplicas={}", getPullReplicaCount(), numOtherReplicas);
+
+    if (log.isInfoEnabled()) {
+      log.info("Creating jetty instances pullReplicaCount={} numOtherReplicas={}"
+          , getPullReplicaCount(), numOtherReplicas);
+    }
 
     int addedReplicas = 0;
     for (int i = 1; i <= numJettys; i++) {
@@ -435,7 +438,10 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
       if (numOtherReplicas > 0) {
         numOtherReplicas--;
         if (useTlogReplicas()) {
-          log.info("create jetty {} in directory {} of type {} in shard {}", i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1));
+          if (log.isInfoEnabled()) {
+            log.info("create jetty {} in directory {} of type {} in shard {}"
+                , i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1)); // logOk
+          }
           customThreadPool.submit(() -> {
             try {
               JettySolrRunner j = createJetty(jettyDir, useJettyDataDir ? getDataDir(testDir + "/jetty"
@@ -464,7 +470,10 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
 
           addedReplicas++;
         } else {
-          log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1));
+          if (log.isInfoEnabled()) {
+            log.info("create jetty {} in directory {} of type {} for shard{}"
+                , i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1)); // logOk
+          }
           customThreadPool.submit(() -> {
             try {
@@ -492,7 +501,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
           addedReplicas++;
         }
       } else {
-        log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1));
+        log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1)); // logOk
         customThreadPool.submit(() -> {
           try {
             JettySolrRunner j = createJetty(jettyDir, useJettyDataDir ? getDataDir(testDir + "/jetty"
@@ -588,7 +597,9 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
   }
 
   protected void waitForLiveNode(JettySolrRunner j) throws InterruptedException, TimeoutException {
-    log.info("waitForLiveNode: {}", j.getNodeName());
+    if (log.isInfoEnabled()) {
+      log.info("waitForLiveNode: {}", j.getNodeName());
+    }
     cloudClient.getZkStateReader().waitForLiveNodes(30, TimeUnit.SECONDS, SolrCloudTestCase.containsLiveNode(j.getNodeName()));
   }
 
@@ -945,10 +956,10 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     } catch (Exception exc) {
       Throwable rootCause = SolrException.getRootCause(exc);
       if (++numRetries <= maxRetries) {
-        log.warn("ERROR: " + rootCause + " ... Sleeping for " + waitBeforeRetry + " seconds before re-try ...");
+        log.warn("ERROR: {} ... Sleeping for {} seconds before re-try ...", rootCause, waitBeforeRetry);
         Thread.sleep(waitBeforeRetry * 1000L);
       } else {
-        log.error("No more retries available! Add batch failed due to: " + rootCause);
+        log.error("No more retries available! Add batch failed due to: {}", rootCause);
         throw exc;
       }
     }
@@ -1418,7 +1429,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
   }
 
   protected void enableAutoSoftCommit(int time) {
-    log.info("Turning on auto soft commit: " + time);
+    log.info("Turning on auto soft commit: {}", time);
     for (List<CloudJettyRunner> jettyList : shardToJetty.values()) {
       for (CloudJettyRunner jetty : jettyList) {
         CoreContainer cores = jetty.jetty.getCoreContainer();
@@ -1802,7 +1813,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     }
     params.set("name", collectionName);
     if ("1".equals(getStateFormat()) ) {
-      log.info("Creating collection with stateFormat=1: " + collectionName);
+      log.info("Creating collection with stateFormat=1: {}", collectionName);
       params.set(DocCollection.STATE_FORMAT, "1");
     }
     SolrRequest request = new QueryRequest(params);
@@ -1981,8 +1992,10 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
           random().nextBoolean(), 5000, 120000);
       commonCloudSolrClient.setDefaultCollection(DEFAULT_COLLECTION);
       commonCloudSolrClient.connect();
-      log.info("Created commonCloudSolrClient with updatesToLeaders={} and parallelUpdates={}",
-          commonCloudSolrClient.isUpdatesToLeaders(), commonCloudSolrClient.isParallelUpdates());
+      if (log.isInfoEnabled()) {
+        log.info("Created commonCloudSolrClient with updatesToLeaders={} and parallelUpdates={}",
+            commonCloudSolrClient.isUpdatesToLeaders(), commonCloudSolrClient.isParallelUpdates());
+      }
     }
   }
   return commonCloudSolrClient;
@@ -2119,14 +2132,18 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
       leader = shard.getLeader();
       assertNotNull(leader);
-      log.info("Found "+replicas.size()+" replicas and leader on "+
-          leader.getNodeName()+" for "+shardId+" in "+testCollectionName);
+      if (log.isInfoEnabled()) {
+        log.info("Found {} replicas and leader on {} for {} in {}"
+            , replicas.size(), leader.getNodeName(), shardId, testCollectionName);
+      }
 
       // ensure all replicas are "active" and identify the non-leader replica
       for (Replica replica : replicas) {
         if (!zkShardTerms.canBecomeLeader(replica.getName()) ||
             replica.getState() != Replica.State.ACTIVE) {
-          log.info("Replica {} is currently {}", replica.getName(), replica.getState());
+          if (log.isInfoEnabled()) {
+            log.info("Replica {} is currently {}", replica.getName(), replica.getState());
+          }
           allReplicasUp = false;
         }
 
@@ -2150,7 +2167,9 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     if (notLeaders.isEmpty())
       fail("Didn't isolate any replicas that are not the leader! ClusterState: " + printClusterStateInfo());
 
-    log.info("Took {} ms to see all replicas become active.", timer.getTime());
+    if (log.isInfoEnabled()) {
+      log.info("Took {} ms to see all replicas become active.", timer.getTime());
+    }
 
     List<Replica> replicas = new ArrayList<>(notLeaders.values());
     return replicas;
@@ -2186,7 +2205,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     Thread.sleep(1000);
 
     // send reload command for the collection
-    log.info("Sending RELOAD command for "+testCollectionName);
+    log.info("Sending RELOAD command for {}", testCollectionName);
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionParams.CollectionAction.RELOAD.toString());
     params.set("name", testCollectionName);
@@ -2231,7 +2250,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
         logReplicationDetails(r, builder);
       }
     }
-    log.info("Summary of the cluster: " + builder.toString());
+    log.info("Summary of the cluster: {}", builder);
   }
 
   protected void waitForReplicationFromReplicas(String collectionName, ZkStateReader zkStateReader, TimeOut timeout) throws KeeperException, InterruptedException, IOException {
@@ -2265,7 +2284,10 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
       while (true) {
         long replicaIndexVersion = getIndexVersion(pullReplica);
         if (leaderIndexVersion == replicaIndexVersion) {
-          log.info("Leader replica's version ({}) in sync with replica({}): {} == {}", leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion);
+          if (log.isInfoEnabled()) {
+            log.info("Leader replica's version ({}) in sync with replica({}): {} == {}"
+                , leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion);
+          }
 
           // Make sure the host is serving the correct version
           try (SolrCore core = containers.get(pullReplica.getNodeName()).getCore(pullReplica.getCoreName())) {
@@ -2276,7 +2298,10 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
             if (Long.parseLong(servingVersion) == replicaIndexVersion) {
               break;
             } else {
-              log.info("Replica {} has the correct version replicated, but the searcher is not ready yet. Replicated version: {}, Serving version: {}", pullReplica.getName(), replicaIndexVersion, servingVersion);
+              if (log.isInfoEnabled()) {
+                log.info("Replica {} has the correct version replicated, but the searcher is not ready yet. Replicated version: {}, Serving version: {}"
+                    , pullReplica.getName(), replicaIndexVersion, servingVersion);
+              }
             }
           } finally {
             if (ref != null) ref.decref();
@@ -2288,9 +2313,15 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
          fail(String.format(Locale.ROOT, "Timed out waiting for replica %s (%d) to replicate from leader %s (%d)", pullReplica.getName(), replicaIndexVersion, leader.getName(), leaderIndexVersion));
        }
        if (leaderIndexVersion > replicaIndexVersion) {
-          log.info("{} version is {} and leader's is {}, will wait for replication", pullReplica.getName(), replicaIndexVersion, leaderIndexVersion);
+          if (log.isInfoEnabled()) {
+            log.info("{} version is {} and leader's is {}, will wait for replication"
+                , pullReplica.getName(), replicaIndexVersion, leaderIndexVersion);
+          }
         } else {
-          log.info("Leader replica's version ({}) is lower than pull replica({}): {} < {}", leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion);
+          if (log.isInfoEnabled()) {
+            log.info("Leader replica's version ({}) is lower than pull replica({}): {} < {}"
+                , leader.getName(), pullReplica.getName(), leaderIndexVersion, replicaIndexVersion);
+          }
         }
       }
       Thread.sleep(1000);
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java
index ee489d04141..7dfdc36d7ce 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java
@@ -518,11 +518,11 @@ public class ChaosMonkey {
   }
 
   public static void monkeyLog(String msg) {
-    log.info("monkey: " + msg);
+    log.info("monkey: {}", msg);
   }
 
   public static void monkeyLog(String msg, Object...logParams) {
-    log.info("monkey: " + msg, logParams);
+    log.info("monkey: " + msg, logParams); // logOk - the {} placeholders live in msg, so logParams must stay bound to them
   }
 
   public void stopTheMonkey() {
@@ -654,7 +654,7 @@ public class ChaosMonkey {
     for (Slice slice:docCollection.getSlices()) {
       builder.append(slice.getName()).append(": {");
       for (Replica replica:slice.getReplicas()) {
-        log.info(replica.toString());
+        log.info("{}", replica);
         java.util.regex.Matcher m = portPattern.matcher(replica.getBaseUrl());
         m.find();
         String jettyPort = m.group(1);
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java b/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java
index a72f06bb7ab..499f0279cf6 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/CloudInspectUtil.java
@@ -187,12 +187,12 @@ public class CloudInspectUtil {
       q = SolrTestCaseJ4.params("q", "*:*", "rows", "100000", "fl", "id", "tests", "checkShardConsistency(vsControl)/getIds"); // add a tag to aid in debugging via logs
       controlDocList = controlClient.query(q).getResults();
       if (controlDocs != controlDocList.getNumFound()) {
-        log.error("Something changed! control now " + controlDocList.getNumFound());
+        log.error("Something changed! control now {}", controlDocList.getNumFound());
       }
 
       cloudDocList = cloudClient.query(q).getResults();
       if (cloudClientDocs != cloudDocList.getNumFound()) {
-        log.error("Something changed! cloudClient now " + cloudDocList.getNumFound());
+        log.error("Something changed! cloudClient now {}", cloudDocList.getNumFound());
       }
 
       if (controlDocs != cloudClientDocs && (addFails != null || deleteFails != null)) {
@@ -230,7 +230,7 @@ public class CloudInspectUtil {
       SolrDocumentList a = controlClient.query(q, SolrRequest.METHOD.POST).getResults();
       SolrDocumentList b = cloudClient.query(q, SolrRequest.METHOD.POST).getResults();
 
-      log.error("controlClient :" + a + "\n\tcloudClient :" + b);
+      log.error("controlClient :{}\n\tcloudClient :{}", a, b);
     } catch (Exception e) {
       // swallow any exceptions, this is just useful for producing debug output,
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java b/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java
index 4ddbd27f598..7e73f6acc56 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/IpTables.java
@@ -37,7 +37,7 @@ public class IpTables {
 
   public static void blockPort(int port) throws IOException, InterruptedException {
     if (ENABLED) {
-      log.info("Block port with iptables: " + port);
+      log.info("Block port with iptables: {}", port);
       BLOCK_PORTS.add(port);
       runCmd(("iptables -A INPUT -p tcp --dport " + port + " -j DROP")
           .split("\\s"));
@@ -48,7 +48,7 @@ public class IpTables {
 
   public static void unblockPort(int port) throws IOException, InterruptedException {
     if (ENABLED && BLOCK_PORTS.contains(port)) {
-      log.info("Unblock port with iptables: " + port);
+      log.info("Unblock port with iptables: {}", port);
       runCmd(("iptables -D INPUT -p tcp --dport " + port + " -j DROP")
           .split("\\s"));
       runCmd(("iptables -D OUTPUT -p tcp --dport " + port + " -j DROP")
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
index 4df46dfcf5c..f65374f3578 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
@@ -355,7 +355,9 @@ public class MiniSolrCloudCluster {
 
   public void waitForNode(JettySolrRunner jetty, int timeoutSeconds)
       throws IOException, InterruptedException, TimeoutException {
-    log.info("waitForNode: {}", jetty.getNodeName());
+    if (log.isInfoEnabled()) {
+      log.info("waitForNode: {}", jetty.getNodeName());
+    }
 
     ZkStateReader reader = getSolrClient().getZkStateReader();
@@ -693,7 +695,9 @@ public class MiniSolrCloudCluster {
       zkClient.getSolrZooKeeper().closeCnxn();
       long sessionId = zkClient.getSolrZooKeeper().getSessionId();
       zkServer.expire(sessionId);
-      log.info("Expired zookeeper session {} from node {}", sessionId, jetty.getBaseUrl());
+      if (log.isInfoEnabled()) {
+        log.info("Expired zookeeper session {} from node {}", sessionId, jetty.getBaseUrl());
+      }
     }
   }
 
@@ -794,7 +798,9 @@ public class MiniSolrCloudCluster {
   }
 
   public void waitForJettyToStop(JettySolrRunner runner) throws TimeoutException {
-    log.info("waitForJettyToStop: {}", runner.getLocalPort());
+    if (log.isInfoEnabled()) {
+      log.info("waitForJettyToStop: {}", runner.getLocalPort());
+    }
     TimeOut timeout = new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME);
     while(!timeout.hasTimedOut()) {
       if (runner.isStopped()) {
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
index fccb4abdeec..9c34fac9de3 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
@@ -347,7 +347,9 @@ public class SolrCloudTestCase extends SolrTestCaseJ4 {
     return (liveNodes, collectionState) -> {
       if (collectionState == null)
         return false;
-      log.info("active slice count: " + collectionState.getActiveSlices().size() + " expected:" + expectedShards);
+      if (log.isInfoEnabled()) {
+        log.info("active slice count: {} expected: {}", collectionState.getActiveSlices().size(), expectedShards);
+      }
       if (collectionState.getActiveSlices().size() != expectedShards)
         return false;
       return compareActiveReplicaCountsForShards(expectedReplicas, liveNodes, collectionState);
@@ -389,7 +391,7 @@ public class SolrCloudTestCase extends SolrTestCaseJ4 {
       }
     }
 
-    log.info("active replica count: " + activeReplicas + " expected replica count: " + expectedReplicas);
+    log.info("active replica count: {} expected replica count: {}", activeReplicas, expectedReplicas);
 
     return activeReplicas == expectedReplicas;
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java
index 4d60b4e06e1..17b3eb6cd69 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableCommitThread.java
@@ -58,7 +58,9 @@ public class StoppableCommitThread extends StoppableThread {
         break;
       }
     }
-    log.info("StoppableCommitThread finished. Committed {} times. Failed {} times.", numCommits.get(), numFails.get());
+    if (log.isInfoEnabled()) {
+      log.info("StoppableCommitThread finished. Committed {} times. Failed {} times.", numCommits.get(), numFails.get());
+    }
   }
 
   @Override
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java
index c579f22def3..7e55231dd43 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/StoppableSearchThread.java
@@ -68,7 +68,7 @@ class StoppableSearchThread extends AbstractFullDistribZkTestBase.StoppableThrea
       }
     }
 
-    log.info("num searches done:" + numSearches + " with " + queryFails + " fails");
+    log.info("num searches done: {} with {} fails", numSearches, queryFails);
   }
 
   @Override
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
index 7195cd1f462..b1af9d95d71 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
@@ -178,7 +178,9 @@ public class ZkTestServer {
     }
 
     public void updateForFire(WatchedEvent event) {
-      log.debug("Watch fired: {}: {}", desc, event.getPath());
+      if (log.isDebugEnabled()) {
+        log.debug("Watch fired: {}: {}", desc, event.getPath());
+      }
       counters.decrementAndGet(event.getPath());
     }
 
@@ -559,7 +561,7 @@ public class ZkTestServer {
         } else {
           this.clientPortAddress = new InetSocketAddress(clientPort);
         }
-        log.info("client port:" + this.clientPortAddress);
+        log.info("client port: {}", this.clientPortAddress);
       }
     };
     try {
@@ -592,7 +594,7 @@ public class ZkTestServer {
         }
         cnt++;
       }
-      log.info("start zk server on port:" + port);
+      log.info("start zk server on port: {}", port);
 
       waitForServerUp(getZkHost(), 30000);
 
@@ -705,7 +707,7 @@ public class ZkTestServer {
   public static String send4LetterWord(String host, int port, String cmd) throws IOException {
-    log.info("connecting to " + host + " " + port);
+    log.info("connecting to {} {}", host, port);
     BufferedReader reader = null;
     try (Socket sock = new Socket(host, port)) {
       OutputStream outstream = sock.getOutputStream();
@@ -730,7 +732,7 @@ public class ZkTestServer {
   }
 
   public static List<HostPort> parseHostPortList(String hplist) {
-    log.info("parse host and port list: " + hplist);
+    log.info("parse host and port list: {}", hplist);
     ArrayList<HostPort> alist = new ArrayList<>();
     for (String hp : hplist.split(",")) {
       int idx = hp.lastIndexOf(':');
@@ -803,7 +805,9 @@ public class ZkTestServer {
     File file = new File(solrhome, "collection1" + File.separator + "conf" + File.separator + srcName);
     if (!file.exists()) {
-      log.info("skipping " + file.getAbsolutePath() + " because it doesn't exist");
+      if (log.isInfoEnabled()) {
+        log.info("skipping {} because it doesn't exist", file.getAbsolutePath());
+      }
       return;
     }
 
@@ -811,7 +815,9 @@ public class ZkTestServer {
     if (zkChroot != null) {
       destPath = zkChroot + destPath;
     }
-    log.info("put " + file.getAbsolutePath() + " to " + destPath);
+    if (log.isInfoEnabled()) {
+      log.info("put {} to {}", file.getAbsolutePath(), destPath);
+    }
     zkClient.makePath(destPath, file, false, true);
   }
 
diff --git a/solr/test-framework/src/java/org/apache/solr/util/RandomMergePolicy.java b/solr/test-framework/src/java/org/apache/solr/util/RandomMergePolicy.java
index b930a4ff580..89c0c55fc63 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/RandomMergePolicy.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/RandomMergePolicy.java
@@ -39,7 +39,9 @@ public class RandomMergePolicy extends FilterMergePolicy {
 
   protected RandomMergePolicy(MergePolicy inner) {
     super(inner);
-    log.info("RandomMergePolicy wrapping {}: {}", inner.getClass(), inner);
+    if (log.isInfoEnabled()) {
+      log.info("RandomMergePolicy wrapping {}: {}", inner.getClass(), inner);
+    }
   }
 }
diff --git a/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java b/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java
index c8dda8747af..1cd90bea126 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java
@@ -176,7 +176,7 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       failed = false;
     } finally {
       if (failed) {
-        log.error("REQUEST FAILED: " + request);
+        log.error("REQUEST FAILED: {}", request);
       }
     }
 
@@ -227,7 +227,7 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       failed = false;
     } finally {
       if (failed) {
-        log.error("REQUEST FAILED: " + request);
+        log.error("REQUEST FAILED: {}", request);
       }
     }
 
@@ -240,20 +240,16 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       String err = JSONTestUtil.match(response, testJSON, delta);
       failed = false;
       if (err != null) {
-        log.error("query failed JSON validation. error=" + err +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
-        );
+        log.error("query failed JSON validation. error={}" +
+            "\n expected ={}\n response = {}\n request = {}\n"
+            , err, testJSON, response, request);
         throw new RuntimeException(err);
       }
     } finally {
       if (failed) {
-        log.error("JSON query validation threw an exception." +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
-        );
+        log.error("JSON query validation threw an exception."
+            +"\n expected ={}\n response = {}\n request = {}\n"
+            , testJSON, response, request);
       }
     }
   }
 
@@ -308,7 +304,7 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       failed = false;
     } finally {
       if (failed) {
-        log.error("REQUEST FAILED: " + request);
+        log.error("REQUEST FAILED: {}", request);
       }
     }
 
@@ -321,20 +317,16 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       String err = JSONTestUtil.match(response, testJSON, delta);
       failed = false;
       if (err != null) {
-        log.error("query failed JSON validation. error=" + err +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
-        );
+        log.error("query failed JSON validation. error={}" +
+            "\n expected ={}\n response = {}\n request = {}\n"
+            , err, testJSON, response, request);
         throw new RuntimeException(err);
       }
     } finally {
       if (failed) {
-        log.error("JSON query validation threw an exception." +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
-        );
+        log.error("JSON query validation threw an exception." +
+            "\n expected ={}\n response = {}\n request = {}"
+            , testJSON, response, request);
       }
     }
   }
 
@@ -387,7 +379,7 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       failed = false;
     } finally {
      if (failed) {
-        log.error("REQUEST FAILED: " + request);
+        log.error("REQUEST FAILED: {}", request);
       }
     }
 
@@ -400,20 +392,16 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       String err = JSONTestUtil.match(response, testJSON, delta);
       failed = false;
       if (err != null) {
-        log.error("query failed JSON validation. error=" + err +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
-        );
+        log.error("query failed JSON validation. error={}" +
+            "\n expected ={}\n response = {}\n request = {}\n"
+            , err, testJSON, response, request);
         throw new RuntimeException(err);
       }
     } finally {
       if (failed) {
         log.error("JSON query validation threw an exception." +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
-        );
+            "\n expected ={}\n response = {}\n request = {}\n"
+            , testJSON, response, request);
       }
     }
   }
 
@@ -454,7 +442,7 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       failed = false;
     } finally {
       if (failed) {
-        log.error("REQUEST FAILED: " + request);
+        log.error("REQUEST FAILED: {}", request);
       }
     }
 
@@ -467,19 +455,16 @@ abstract public class RestTestBase extends SolrJettyTestBase {
       String err = JSONTestUtil.match(response, testJSON, delta);
       failed = false;
       if (err != null) {
-        log.error("query failed JSON validation. error=" + err +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
-        );
+        log.error("query failed JSON validation. error={}\n expected ={}" +
+            "\n response = {}\n request = {}"
+            , err, testJSON, response, request);
        throw new RuntimeException(err);
      }
    } finally {
      if (failed) {
-        log.error("JSON query validation threw an exception." +
-            "\n expected =" + testJSON +
-            "\n response = " + response +
-            "\n request = " + request + "\n"
+        log.error("JSON query validation threw an exception.\n expected ={}" +
+            "\n response = {}\n request = {}"
+            , testJSON, response, request
         );
       }
     }