LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects

Erick Erickson 2020-04-30 19:50:31 -04:00
parent 9ed5b6a218
commit 9ae05e9b4f
173 changed files with 1173 additions and 967 deletions

View File

@ -40,7 +40,8 @@ configure(rootProject) {
"javadoc",
"rat",
"ecjLint",
"checkMissingDocs"
"checkMissingDocs",
"validateLogCalls"
]}
}
}

View File

@ -17,147 +17,33 @@
import org.apache.tools.ant.BuildException
import static java.util.concurrent.TimeUnit.SECONDS
// Checks logging calls to keep from using patterns that might be expensive
// either in CPU or unnecessary object creation
// either in CPU or unnecessary object creation. Some of the checks are a little
// over-zealous, but it's worth a bit of aggravation to catch the egregious
// mistakes before they accrue.
configure(rootProject) {
}
// This all started with Java Flight Recorder showing a massive number of
// objects allocated and garbage collected. Tracing a bunch of them down showed
// that we have a number of calls like:
// log.debug("some string" + very_expensive_object_to_make_a_string).
// which _always_ constructed the very expensive object even if the log level
// was INFO or more severe. Rather than leave lots of confusing formats lying
// around or expecting people to remember the rules, we opted to make this
// check part of Gradle precommit. It will be included by default in Solr
// 9.0.
// See the associated help task "gradlew helpValidateLogCalls"
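// For illustration (names here are hypothetical), the wasteful form and the
// preferred parameterized form:
//   log.debug("clusterstate: " + clusterState);  // builds the String even when debug is off
//   log.debug("clusterstate: {}", clusterState); // defers toString() until debug is on
// When an argument is itself a method call, guard the whole statement:
//   if (log.isDebugEnabled()) {
//     log.debug("leaders: {}", collectLeaders()); // collectLeaders() is a made-up helper
//   }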
allprojects {
plugins.withType(JavaPlugin) {
task validateLogCalls(type: ValidateLogCallsTask) {
description "Checks that log calls are either validated or conform to efficient patterns."
group "verification"
doFirst {
if (project.hasProperty('srcDir')) {
srcDir.addAll(project.getProperty('srcDir').split(','))
} else { // Remove this later, make it optional
//TODO
throw new BuildException(String.format(Locale.ENGLISH,
'''Until we get all the calls cleaned up, you MUST specify -PsrcDir=relative_path, e.g.
"-PsrcDir=solr/core/src/java/org/apache/solr/core". This task will recursively check all
"*.java" files under that directory'''))
}
checkPlus = Boolean.valueOf(propertyOrDefault('checkPlus', 'false'))
}
}
}
// // Attach ecjLint to check.
// check.dependsOn ecjLint
// What does the below mean?
// // Each validation task should be attached to check but make sure
// // precommit() as a whole is a dependency on rootProject.check
// check.dependsOn precommit
}
//}
class ValidateLogCallsTask extends DefaultTask {
@Input
List<String> srcDir = []
@Input
boolean checkPlus
// TODO, remove when you go to project-based checking.
Set<String> dirsToCheck = [
"solr/core/src/java/org/apache/solr/analysis"
, "solr/core/src/java/org/apache/solr/api"
, "solr/core/src/java/org/apache/solr/client"
, "solr/core/src/java/org/apache/solr/cloud" // 120
, "solr/core/src/java/org/apache/solr/cloud/api"
, "solr/core/src/java/org/apache/solr/cloud/autoscaling"
, "solr/core/src/java/org/apache/solr/cloud/cdcr"
, "solr/core/src/java/org/apache/solr/cloud/hdfs"
, "solr/core/src/java/org/apache/solr/cloud/overseer"
, "solr/core/src/java/org/apache/solr/cloud/rule"
, "solr/core/src/java/org/apache/solr/core"
, "solr/core/src/java/org/apache/solr/filestore"
, "solr/core/src/java/org/apache/solr/handler/admin"
, "solr/core/src/java/org/apache/solr/handler/component"
, "solr/core/src/java/org/apache/solr/handler/export"
, "solr/core/src/java/org/apache/solr/handler/loader"
, "solr/core/src/java/org/apache/solr/handler/tagger"
, "solr/core/src/java/org/apache/solr/highlight"
, "solr/core/src/java/org/apache/solr/index"
, "solr/core/src/java/org/apache/solr/internal"
, "solr/core/src/java/org/apache/solr/legacy"
, "solr/core/src/java/org/apache/solr/logging"
, "solr/core/src/java/org/apache/solr/metrics"
, "solr/core/src/java/org/apache/solr/packagemanager"
, "solr/core/src/java/org/apache/solr/parser"
, "solr/core/src/java/org/apache/solr/pkg"
, "solr/core/src/java/org/apache/solr/query"
, "solr/core/src/java/org/apache/solr/request"
, "solr/core/src/java/org/apache/solr/response"
, "solr/core/src/java/org/apache/solr/rest"
, "solr/core/src/java/org/apache/solr/schema"
, "solr/core/src/java/org/apache/solr/search"
, "solr/core/src/java/org/apache/solr/security"
, "solr/core/src/java/org/apache/solr/servlet"
, "solr/core/src/java/org/apache/solr/spelling"
, "solr/core/src/java/org/apache/solr/store"
, "solr/core/src/java/org/apache/solr/uninverting"
, "solr/core/src/java/org/apache/solr/update"
, "solr/core/src/java/org/apache/solr/util"
, "solr/solrj"
, "solr/core/src/java/org/apache/solr/handler"
, "solr/core/src/test/org/apache/solr/cloud/api"
, "solr/core/src/test/org/apache/solr/cloud/autoscaling"
// , "solr/core/src/test/org/apache/solr/cloud"
// , "solr/core/src/test/org/apache/solr/cloud/cdcr"
// , "solr/core/src/test/org/apache/solr/handler"
// , "solr/core/src/test/org/apache/solr/metrics"
// , "solr/core/src/test/org/apache/solr/request"
// , "solr/core/src/test/org/apache/solr/response"
// , "solr/core/src/test/org/apache/solr/schema"
// , "solr/core/src/test/org/apache/solr/search"
// , "solr/core/src/test/org/apache/solr/security"
// , "solr/core/src/test/org/apache/solr/spelling"
// , "solr/core/src/test/org/apache/solr"
// , "solr/core/src/test/org/apache/solr/update"
// , "solr/core/src/test/org/apache/solr/util"
// , "solr/core/src/test"
// , "solr/core"
]
//TODO REMOVE ME! Really! and the check for bare parens. Several times I've put in () when I meant {} and only
// caught it by chance. So for this mass edit, I created a check with a lot of false positives. This is a list of
// them and we won't report them.
Map<String, List<Integer>> parenHack = [
"AddReplicaCmd.java" : [99]
, "Assign.java" : [329]
, "CloudSolrClientTest.java" : [1083]
, "CommitTracker.java" : [135]
, "DeleteReplicaCmd.java" : [75]
, "DirectUpdateHandler2.java" : [838, 859]
, "ManagedIndexSchemaFactory.java": [284]
, "MoveReplicaCmd.java" : [75]
, "PeerSync.java" : [704]
, "RecordingJSONParser.java" : [76]
, "SliceMutator.java" : [61]
, "SolrDispatchFilter.java" : [150, 205, 242]
, "Suggester.java" : [147, 181]
, "TestSimTriggerIntegration.java" : [710, 713]
, "TestSolrJErrorHandling.java" : [289]
, "TriggerIntegrationTest.java" : [427, 430]
, "UpdateLog.java" : [1976]
, "V2HttpCall.java" : [158]
// checking against 8x in master, take these out usually.
// , "CoreContainer.java" : [1096]
// , "ConcurrentLFUCache.java" : [700, 911]
// , "ConcurrentLRUCache.java" : [911]
// , "DirectUpdateHandler2.java" : [844, 865]
// , "PeerSync.java" : [697]
// , "SolrIndexWriter.java" : [356]
// , "UpdateLog.java" : [1973]
]
def logLevels = ["log.trace", "log.debug", "log.info", "log.warn", "log.error", "log.fatal"]
def errsFound = 0;
@ -170,32 +56,16 @@ class ValidateLogCallsTask extends DefaultTask {
// We have a log.something line, check for patterns we're not fond of.
def checkLine(File file, String line, int lineNumber, String previous) {
boolean violation = false
def bareParens = (line =~ /".*?"/).findAll()
bareParens.each({ part ->
if (part.contains("()")) {
List<Integer> hack = parenHack.get(file.name)
if (hack == null || hack.contains(lineNumber) == false) {
violation = true
}
}
})
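// For example, log.info("Found () docs", count) is almost certainly a typo for
// log.info("Found {} docs", count); the parenHack map above whitelists the few
// lines where "()" inside a quoted string is intentional.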
// If the line has been explicitly checked, skip it.
// If the line has been explicitly OK'd, then it's OK!
if (line.replaceAll("\\s", "").toLowerCase().contains("//logok")) {
return
}
// Strip all of the comments, things in quotes and the like.
def level = ""
def lev = (line =~ "log\\.(.*?)\\(")
if (lev.find()) {
level = lev.group(1).toLowerCase().trim()
}
def stripped =
line.replaceFirst("//.*", " ") // remove comment to EOL. Again, fragile due to the possibility of embedded double slashes
.replaceFirst(/.*?\(/, " ") // Get rid of "log.info("
line.replaceFirst("//.*", " ") // remove comment to EOL. May be fragile due to the possibility of embedded double slashes
.replaceFirst(/.*?\(/, " ") // Get rid of "log.whatever("
.replaceFirst(/\);/, " ") // get rid of the closing ");"
.replaceFirst("/\\*.*?\\*/", " ") // replace embedded comments "/*....*/"
.replaceAll(/".*?"/, '""') // remove anything between quotes. This is a bit fragile if there are embedded double quotes.
@ -203,12 +73,9 @@ class ValidateLogCallsTask extends DefaultTask {
.replaceAll(/TimeUnit\..*?\.convert\(.*?\)/, " ") // again, a pattern that's efficient
.replaceAll("\\s", "")
def m = stripped =~ "\\(.*?\\)"
def hasParens = m.find()
// The compiler will pre-assemble patterns like 'log.info("string const1 {}" + " string const2 {}", obj1, obj2)'
// to log.info("string const1 {} string const2 {}", obj1, obj2)', so don't worry about any plus preceeded and
// followed by double quotes.
// to log.info("string const1 {} string const2 {}", obj1, obj2)', so don't worry about any plus preceded and
// followed by double quotes, otherwise flag it.
def hasPlus = false
for (int idx = 0; idx < stripped.length(); ++idx) {
if (stripped.charAt(idx) == '+') {
@ -219,73 +86,87 @@ class ValidateLogCallsTask extends DefaultTask {
}
}
}
// Check that previous line isn't an if statement for always-reported log levels. Arbitrary decision: we don't
// really care about checking for awkward constructions for WARN and above, so report a violation if the previous
// line contains an if for those levels.
//TODO remove me
String cause = ""
// Get the level of this log message.
// We don't really care about checking for method calls for WARN and more severe, the presumption
// is that they _should_ always be reported.
boolean dontReportLevels = level.equals("fatal") || level.equals("error") || level.equals("warn")
def level = ""
def lev = (line =~ "log\\.(.*?)\\(")
if (lev.find()) {
level = lev.group(1).toLowerCase().trim()
}
if (logLevels.contains("log." + level) == false) {
reportViolation(String.format("Found an unexpeted log level: %s, file: %s:%d"
, level, file.getAbsolutePath(), lineNumber))
}
boolean persnicketyLevel = !(level.equals("fatal") || level.equals("error") || level.equals("warn"))
// Check that previous line isn't an if statement for always-reported log levels.
// There's a convention to declare a member variable for whether a level is enabled and check that rather than
// isDebugEnabled so we need to check both.
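// For example, either of these on the preceding line counts as a guard (illustrative):
//   if (log.isDebugEnabled()) {
//   if (debug) { // where 'debug' is a member variable caching log.isDebugEnabled()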
String prevLine = previous.replaceAll("\\s+", "").toLowerCase()
boolean prevLineNotIf = ((prevLine.contains("if(log.is" + level + "enabled") == false
&& prevLine.contains("if(" + level + ")") == false))
if (dontReportLevels) {
// Only look (optionally) for plusses if surrounded by an if isLevelEnabled clause
// Otherwise, we're always OK with logging the message.
if (hasPlus && checkPlus) {
violation = true
}
} else { // less severe than warn, check for parens and plusses and correct
if (hasParens && prevLineNotIf) {
violation = true
}
if (hasPlus && checkPlus) {
violation = true
}
if (hasPlus && prevLineNotIf) {
if (hasPlus) {
cause = "hasPlus: " + hasPlus
violation = true
}
if (violation == false) {
def m = stripped =~ "\\(.*?\\)"
def hasParens = m.find()
if (hasParens && prevLineNotIf && persnicketyLevel) {
cause = "hasParens " + hasParens + " prevLineNotIf " + prevLineNotIf + " pernicketyLevel " + persnicketyLevel
violation = true
}
}
// Always report toString(). Note, this over-reports some constructs
// but just add //logOK if it's really OK.
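// e.g. log.info("Added document: {}", doc.toString()); is flagged; pass the
// object itself instead: log.info("Added document: {}", doc);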
if (line.contains("toString(") == true) {
if (line.replaceAll(/Arrays.toString\(/, "").contains("toString(") && prevLineNotIf) {
if (violation == false) {
if (line.contains("toString(") == true && prevLineNotIf) {
cause = "Line contains toString"
violation = true
}
}
if (violation) {
reportViolation(String.format("Suspicious logging call, Parameterize and possibly surround with 'if (log.is*Enabled) {..}'. Help at: 'gradlew helpValidateLogCalls' %s %s:%d"
, System.lineSeparator, file.getAbsolutePath(), lineNumber))
reportViolation(String.format("cause: '%s' Suspicious logging call, Parameterize and possibly surround with 'if (log.is*Enabled) {..}'. Help at: 'gradlew helpValidateLogCalls' %s %s:%d"
, cause, System.lineSeparator(), file.getAbsolutePath(), lineNumber))
}
return
}
// Require all our logger definitions lower case "log", except a couple of special ones.
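// For reference, a conforming declaration looks like this (the usual Solr idiom,
// shown here for illustration):
//   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());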
def checkLogName(File file, String line) {
// It's many times faster to do the check this way than to use a regex
if (line.contains("static ") && line.contains("getLogger") && line.contains(" log ") == false) {
switch (file.name) {
case "LoggerFactory.java": break
case "SolrCore.java": // Except for two know log files with a different name.
if (line.contains("requestLog") || line.contains("slowLog")) {
break
}
case "StartupLoggingUtils.java":
if (line.contains("getLoggerImplStr")) {
break;
}
default:
reportViolation("Change the logger name to lower-case 'log' in " + file.name + " " + line)
break;
String name = file.name
if (name.equals("LoggerFactory.java")) {
return
}
if (name.equals("SolrCore.java") && (line.contains("requestLog") || line.contains("slowLog"))) {
return
}
if (name.equals("StartupLoggingUtils.java") && line.contains("getLoggerImplStr")) {
return
}
// Apparently the Hadoop code expects upper-case LOG, so...
if ((name.equals("HttpServer2.java") || name.equals("BlockPoolSlice.java") || name.equals("FileUtil.java"))
&& line.contains(" LOG ")) {
return
}
reportViolation("Change the logger name to lower-case 'log' in " + file.name + " " + line + " project" + project)
}
}
def checkFile(File file) {
int state = 0 // 0 == not collecting a log line, 1 == collecting a log line, 2 == just collected the last.
int state = 0
int lineNumber = 0
StringBuilder sb = new StringBuilder();
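// For illustration, a call split across lines is buffered into 'sb' until the
// line ending in ");" arrives, then checked as a single string, e.g.:
//   log.info("completed {} of {} shards",
//       finished, total);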
@ -296,7 +177,7 @@ class ValidateLogCallsTask extends DefaultTask {
lineNumber++
checkLogName(file, line)
switch (state) {
case 0:
case 0: // Not collecting a logging line.
logLevels.each {
if (line.contains(it)) {
if (line.contains(");")) {
@ -310,8 +191,11 @@ class ValidateLogCallsTask extends DefaultTask {
}
break
case 1: // collecting
if (line.contains(");")) {
case 1: // collecting a log line
if (line.replaceFirst("//.*", " ")
.replaceFirst("/\\*.*?\\*/", " ") // replace embedded comments "/*....*/"
.replaceAll(/".*?"/, '""') // remove anything between quotes. This is a bit fragile if there are embedded double quotes.
.trim().endsWith(");")) {
state = 2
}
sb.append(line)
@ -322,12 +206,12 @@ class ValidateLogCallsTask extends DefaultTask {
break
}
switch (state) { // It's just easier to do this here rather than read another line in the switch above.
case 0:
case 0: // Not collecting a log line
prevLine = line.toLowerCase();
break;
case 1:
case 1: // collecting a logging line.
break;
case 2:
case 2: // We've collected the complete log line.
checkLine(file, sb.toString(), lineNumber, prevLine)
state = 0
break;
@ -340,40 +224,10 @@ class ValidateLogCallsTask extends DefaultTask {
@TaskAction
def checkLogLines() {
// println srcDir
dirsToCheck.addAll(srcDir)
//TODO. This is here to check 8x on another branch since I can't run Gradle
// over 8x. Used periodically as a sanity check.
// new File("/Users/Erick/apache/solrJiras/master").traverse(type: groovy.io.FileType.FILES, nameFilter: ~/.*\.java/) { File it ->
// if (dirsToCheck.any { dir ->
// it.getCanonicalPath().contains(dir)
// }) {
// if (checkFile(it)) {
// println(it.getAbsolutePath())
// // TODO. This just makes it much easier to get to the files during this mass migration!
// }
// }
// new File("/Users/Erick/apache/SolrEOEFork").traverse(type: groovy.io.FileType.FILES, nameFilter: ~/.*\.java/) { File it ->
// if (checkFile(it)) {
// println(it.getAbsolutePath())
// }
// }
//
//TODO
// println project
// This is the real stuff
project.sourceSets.each { srcSet ->
srcSet.java.each { f ->
if (srcDir.contains("all")) { // TODO
checkFile(f)
} else if (dirsToCheck.any {
f.getCanonicalPath().contains(it)
}) {
checkFile(f)
}
checkFile(f)
}
}

View File

@ -42,9 +42,8 @@ NOTES:
we can use lambdas rather than "if log.is*Enabled". slf4j 2.0
will support this when released.
- warn, error, and fatal level messages are NOT flagged. However, if you want to
check these levels for including '+', specify '-PcheckPlus=true'. This is more
a style than functional check.
- warn, error, and fatal level messages are NOT flagged for parens, but will
be flagged for containing plusses that aren't simple concatenation.
- You can get into some pretty convoluted constructs trying to pass some of these
checks. Adding //logok, with or without spaces, will cause the line to pass
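For instance (a hypothetical line), appending the directive suppresses the
checks for that line:

   log.warn("cache summary: " + cache.getSummary()); //logok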
@ -57,9 +56,3 @@ NOTES:
For a fuller discussion, see LUCENE-7788 and the other JIRAs linked
from there.
Until we get all the calls cleaned up, you MUST specify -PsrcDir=relative_path,
e.g. '-PsrcDir=solr/core/src/java/org/apache/solr/core'. This task will
recursively check all '*.java' files under that directory. Actually, it
just checks any file whose AbsolutePath contains the specification. May be
comma-delimited.
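For reference, a typical invocation during the migration (path illustrative;
-PcheckPlus is optional):

   gradlew validateLogCalls -PsrcDir=solr/core/src/java/org/apache/solr/core -PcheckPlus=true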

View File

@ -33,7 +33,9 @@ public abstract class AbstractHandler<T extends Observer> {
public void addObserver(T observer) {
observers.add(observer);
log.debug("{} registered.", observer.getClass().getName());
if (log.isDebugEnabled()) {
log.debug("{} registered.", observer.getClass().getName());
}
}
void notifyObservers() {

View File

@ -365,7 +365,7 @@ public final class AddDocumentDialogFactory implements DialogOpener.DialogFactor
}
addDocument(doc);
log.info("Added document: {}", doc.toString());
log.info("Added document: {}", doc);
}
@SuppressWarnings("unchecked")

View File

@ -248,10 +248,10 @@ public final class ExportTermsDialogFactory implements DialogOpener.DialogFactor
try {
filename = toolsModel.exportTerms(directory, field, selectedDelimiter);
} catch (LukeException e) {
log.error("Error while exporting terms from field " + field, e);
log.error("Error while exporting terms from field {}", field, e);
statusLbl.setText(MessageUtils.getLocalizedMessage("export.terms.label.error", e.getMessage()));
} catch (Exception e) {
log.error("Error while exporting terms from field " + field, e);
log.error("Error while exporting terms from field {}", field, e);
statusLbl.setText(MessageUtils.getLocalizedMessage("message.error.unknown"));
throw e;
} finally {

View File

@ -245,7 +245,9 @@ public final class DocumentsImpl extends LukeModel implements Documents {
if (penum.nextDoc() == PostingsEnum.NO_MORE_DOCS) {
// end of the iterator
resetPostingsIterator();
log.info("Reached the end of the postings iterator for term: {} in field: {}", BytesRefUtils.decode(tenum.term()), curField);
if (log.isInfoEnabled()) {
log.info("Reached the end of the postings iterator for term: {} in field: {}", BytesRefUtils.decode(tenum.term()), curField);
}
return Optional.empty();
} else {
return Optional.of(penum.docID());

View File

@ -93,7 +93,9 @@ public final class IndexUtils {
throw new RuntimeException("No valid directory at the location: " + indexPath);
}
log.info(String.format(Locale.ENGLISH, "IndexReaders (%d leaf readers) successfully opened. Index path=%s", readers.size(), indexPath));
if (log.isInfoEnabled()) {
log.info(String.format(Locale.ENGLISH, "IndexReaders (%d leaf readers) successfully opened. Index path=%s", readers.size(), indexPath));
}
if (readers.size() == 1) {
return readers.get(0);
@ -115,7 +117,9 @@ public final class IndexUtils {
public static Directory openDirectory(String dirPath, String dirImpl) throws IOException {
final Path path = FileSystems.getDefault().getPath(Objects.requireNonNull(dirPath));
Directory dir = openDirectory(path, dirImpl);
log.info(String.format(Locale.ENGLISH, "DirectoryReader successfully opened. Directory path=%s", dirPath));
if (log.isInfoEnabled()) {
log.info(String.format(Locale.ENGLISH, "DirectoryReader successfully opened. Directory path=%s", dirPath));
}
return dir;
}

View File

@ -54,7 +54,7 @@ public class MessageFilesParser extends SimpleFileVisitor<Path> {
}
}
} catch (IOException e) {
log.warn("Invalid file? " + file.toString());
log.warn("Invalid file? {}", file);
}
return FileVisitResult.CONTINUE;
}

View File

@ -84,7 +84,7 @@ final class SubtypeCollector<T> implements Runnable {
}
}
} catch (IOException e) {
log.error("Cannot load jar " + url.toString(), e);
log.error("Cannot load jar {}", url, e);
}
}
}

View File

@ -67,7 +67,7 @@ public abstract class StrategyTestCase extends SpatialTestCase {
protected boolean storeShape = true;
protected void executeQueries(SpatialMatchConcern concern, String... testQueryFile) throws IOException {
log.info("testing queried for strategy "+strategy);
log.info("testing queried for strategy "+strategy); // logOk
for( String path : testQueryFile ) {
Iterator<SpatialTestQuery> testQueryIterator = getTestQueries(path, ctx);
runTestQueries(testQueryIterator, concern);

View File

@ -64,7 +64,7 @@ public class HeatmapFacetCounterTest extends StrategyTestCase {
@After
public void after() {
log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero");
log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); // logOK
}
@Test

View File

@ -87,7 +87,7 @@ public class RandomSpatialOpFuzzyPrefixTreeTest extends StrategyTestCase {
((PrefixTreeStrategy) strategy).setPointsOnly(true);
}
log.info("Strategy: " + strategy.toString());
log.info("Strategy: " + strategy.toString()); // logOk
}
private void setupCtx2D(SpatialContext ctx) {

View File

@ -179,7 +179,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar
if (!engine.isAvailable()) {
if (optional) {
log.info("Optional clustering engine not available: " + name);
log.info("Optional clustering engine not available: {}", name);
} else {
throw new SolrException(ErrorCode.SERVER_ERROR,
"A required clustering engine failed to initialize, check the logs: " + name);
@ -192,11 +192,11 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar
} else if (engine instanceof DocumentClusteringEngine) {
previousEntry = documentClusteringEngines.put(name, (DocumentClusteringEngine) engine);
} else {
log.warn("Unknown type of a clustering engine for class: " + engineClassName);
log.warn("Unknown type of a clustering engine for class: {}", engineClassName);
continue;
}
if (previousEntry != null) {
log.warn("Duplicate clustering engine component named '" + name + "'.");
log.warn("Duplicate clustering engine component named '{}'.", name);
}
}
}
@ -237,7 +237,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar
Object clusters = engine.cluster(rb.getQuery(), solrDocList, docIds, rb.req);
rb.rsp.add("clusters", clusters);
} else {
log.warn("No engine named: " + name);
log.warn("No engine named: {}", name);
}
}
@ -257,7 +257,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar
}
rb.rsp.add("clusters", nl);
} else {
log.warn("No engine named: " + name);
log.warn("No engine named: {}", name);
}
}
}
@ -312,7 +312,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar
sreq.params.set(CommonParams.FL, fl + sb.toString());
}
} else {
log.warn("No engine named: " + name);
log.warn("No engine named: {}", name);
}
}
}
@ -342,7 +342,7 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar
Object clusters = engine.cluster(rb.getQuery(), solrDocList, docIds, rb.req);
rb.rsp.add("clusters", clusters);
} else {
log.warn("No engine named: " + name);
log.warn("No engine named: {}", name);
}
}
}
@ -383,9 +383,11 @@ public class ClusteringComponent extends SearchComponent implements SolrCoreAwar
}
if (defaultEngine != null) {
log.info("Default engine for " + type + ": " + engineName + " [" + defaultEngine.getClass().getSimpleName() + "]");
if (log.isInfoEnabled()) {
log.info("Default engine for {}: {} [{}]", type, engineName, defaultEngine.getClass().getSimpleName());
}
} else {
log.warn("No default engine for " + type + ".");
log.warn("No default engine for {}.", type);
}
}
}

View File

@ -153,15 +153,16 @@ public class CarrotClusteringEngine extends SearchClusteringEngine {
// Load Carrot2-Workbench exported attribute XMLs based on the 'name' attribute
// of this component. This by-name convention lookup is used to simplify configuring algorithms.
String componentName = initParams.get(ClusteringEngine.ENGINE_NAME);
log.info("Initializing Clustering Engine '" +
MoreObjects.firstNonNull(componentName, "<no 'name' attribute>") + "'");
if (log.isInfoEnabled()) {
log.info("Initializing Clustering Engine '{}'", MoreObjects.firstNonNull(componentName, "<no 'name' attribute>"));
}
if (!Strings.isNullOrEmpty(componentName)) {
IResource[] attributeXmls = resourceLookup.getAll(componentName + "-attributes.xml");
if (attributeXmls.length > 0) {
if (attributeXmls.length > 1) {
log.warn("More than one attribute file found, first one will be used: "
+ Arrays.toString(attributeXmls));
log.warn("More than one attribute file found, first one will be used: {}"
, Arrays.toString(attributeXmls)); // logOk
}
withContextClassLoader(core.getResourceLoader().getClassLoader(), () -> {
@ -308,8 +309,8 @@ public class CarrotClusteringEngine extends SearchClusteringEngine {
if (split.length == 2 && StringUtils.isNotBlank(split[0]) && StringUtils.isNotBlank(split[1])) {
languageCodeMap.put(split[0], split[1]);
} else {
log.warn("Unsupported format for " + CarrotParams.LANGUAGE_CODE_MAP
+ ": '" + pair + "'. Skipping this mapping.");
log.warn("Unsupported format for {}: '{}'. Skipping this mapping."
, CarrotParams.LANGUAGE_CODE_MAP, pair);
}
}
}
@ -457,8 +458,8 @@ public class CarrotClusteringEngine extends SearchClusteringEngine {
if (split.length == 2 && StringUtils.isNotBlank(split[0]) && StringUtils.isNotBlank(split[1])) {
customFields.put(split[0], split[1]);
} else {
log.warn("Unsupported format for " + CarrotParams.CUSTOM_FIELD_NAME
+ ": '" + customFieldSpec + "'. Skipping this field definition.");
log.warn("Unsupported format for {}: '{}'. Skipping this field definition."
, CarrotParams.CUSTOM_FIELD_NAME, customFieldSpec);
}
}
}

View File

@ -137,17 +137,17 @@ public class LuceneCarrot2StemmerFactory implements IStemmerFactory {
.get(language);
if (stemmerClazz == null) {
log.warn("No Snowball stemmer class for: " + language.name()
+ ". Quality of clustering may be degraded.");
log.warn("No Snowball stemmer class for: {}. "
+ "Quality of clustering may be degraded.", language.name());
return IdentityStemmer.INSTANCE;
}
try {
return new SnowballStemmerAdapter(stemmerClazz.getConstructor().newInstance());
} catch (Exception e) {
log.warn("Could not instantiate snowball stemmer"
+ " for language: " + language.name()
+ ". Quality of clustering may be degraded.", e);
log.warn("Could not instantiate snowball stemmer for language: {}"
+ ". Quality of clustering may be degraded."
, language.name(), e);
return IdentityStemmer.INSTANCE;
}

View File

@ -60,7 +60,7 @@ class SolrResourceLocator implements IResourceLocator {
@Override
public IResource[] getAll(final String resource) {
final String resourceName = carrot2ResourcesDir + "/" + resource;
log.debug("Looking for Solr resource: " + resourceName);
log.debug("Looking for Solr resource: {}", resourceName);
InputStream resourceStream = null;
final byte [] asBytes;
@ -68,8 +68,8 @@ class SolrResourceLocator implements IResourceLocator {
resourceStream = resourceLoader.openResource(resourceName);
asBytes = IOUtils.toByteArray(resourceStream);
} catch (IOException e) {
log.debug("Resource not found in Solr's config: " + resourceName
+ ". Using the default " + resource + " from Carrot JAR.");
log.debug("Resource not found in Solr's config: {}. Using the default {} from Carrot JAR."
, resourceName, resource);
return new IResource[] {};
} finally {
if (resourceStream != null) {
@ -81,7 +81,7 @@ class SolrResourceLocator implements IResourceLocator {
}
}
log.info("Loaded Solr resource: " + resourceName);
log.info("Loaded Solr resource: {}", resourceName);
final IResource foundResource = new IResource() {
@Override

View File

@ -112,7 +112,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
String varName = ConfigNameConstants.IMPORTER_NS_SHORT + "." + cname + "."
+ DocBuilder.LAST_INDEX_TIME;
Object varValue = context.getVariableResolver().resolve(varName);
log.info(varName+"="+varValue);
log.info("{}={}", varName, varValue);
if (varValue != null && !"".equals(varValue) &&
!"".equals(getStringFromContext("fetchMailsSince", ""))) {
@ -123,21 +123,21 @@ public class MailEntityProcessor extends EntityProcessorBase {
try {
tmp = sinceDateParser.parse((String)varValue);
if (tmp.getTime() == 0) {
log.info("Ignoring initial value "+varValue+" for "+varName+
" in favor of fetchMailsSince config parameter");
log.info("Ignoring initial value {} for {} in favor of fetchMailsSince config parameter"
, varValue, varName);
tmp = null; // don't use this value
}
} catch (ParseException e) {
// probably ok to ignore this since we have other options below
// as we're just trying to figure out if the date is 0
log.warn("Failed to parse "+varValue+" from "+varName+" due to: "+e);
log.warn("Failed to parse {} from {} due to", varValue, varName, e);
}
if (tmp == null) {
// favor fetchMailsSince in this case because the value from
// dataimport.properties is the default/init value
varValue = getStringFromContext("fetchMailsSince", "");
log.info("fetchMailsSince="+varValue);
log.info("fetchMailsSince={}", varValue);
}
}
@ -145,7 +145,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
varName = ConfigNameConstants.IMPORTER_NS_SHORT + "."
+ DocBuilder.LAST_INDEX_TIME;
varValue = context.getVariableResolver().resolve(varName);
log.info(varName+"="+varValue);
log.info("{}={}", varName, varValue);
}
if (varValue != null && varValue instanceof String) {
@ -157,13 +157,13 @@ public class MailEntityProcessor extends EntityProcessorBase {
if (lastIndexTime == null)
lastIndexTime = getStringFromContext("fetchMailsSince", "");
log.info("Using lastIndexTime "+lastIndexTime+" for mail import");
log.info("Using lastIndexTime {} for mail import", lastIndexTime);
this.fetchMailsSince = null;
if (lastIndexTime != null && lastIndexTime.length() > 0) {
try {
fetchMailsSince = sinceDateParser.parse(lastIndexTime);
log.info("Parsed fetchMailsSince=" + lastIndexTime);
log.info("Parsed fetchMailsSince={}", lastIndexTime);
} catch (ParseException e) {
throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
"Invalid value for fetchMailSince: " + lastIndexTime, e);
@ -247,8 +247,8 @@ public class MailEntityProcessor extends EntityProcessorBase {
addPartToDocument(mail, row, true);
return row;
} catch (Exception e) {
log.error("Failed to convert message [" + mail.toString()
+ "] to document due to: " + e, e);
log.error("Failed to convert message [{}] to document due to: {}"
, mail, e, e);
return null;
}
}
@ -269,9 +269,9 @@ public class MailEntityProcessor extends EntityProcessorBase {
for (int i = 0; i < count; i++)
addPartToDocument(mp.getBodyPart(i), row, false);
} else {
log.warn("Multipart content is a not an instance of Multipart! Content is: "
+ (content != null ? content.getClass().getName() : "null")
+ ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader.");
log.warn("Multipart content is a not an instance of Multipart! Content is: {}"
+ ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader."
, (content != null ? content.getClass().getName() : "null"));
}
} else if (part.isMimeType("message/rfc822")) {
addPartToDocument((Part) part.getContent(), row, false);
@ -374,8 +374,8 @@ public class MailEntityProcessor extends EntityProcessorBase {
if (("imap".equals(protocol) || "imaps".equals(protocol))
&& "imap.gmail.com".equals(host)) {
log.info("Consider using 'gimaps' protocol instead of '" + protocol
+ "' for enabling GMail specific extensions for " + host);
log.info("Consider using 'gimaps' protocol instead of '{}' for enabling GMail specific extensions for {}"
, protocol, host);
}
props.setProperty("mail.store.protocol", protocol);
@ -399,7 +399,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
} else {
mailbox.connect(host, user, password);
}
log.info("Connected to " + user + "'s mailbox on " + host);
log.info("Connected to {}'s mailbox on {}", user, host);
return true;
} catch (MessagingException e) {
@ -474,7 +474,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
.append(lineSep);
config.append("includeSharedFolders : ").append(includeSharedFolders)
.append(lineSep);
log.info(config.toString());
log.info("{}", config);
}
class FolderIterator implements Iterator<Folder> {
@ -515,14 +515,16 @@ public class MailEntityProcessor extends EntityProcessorBase {
hasMessages = (next.getType() & Folder.HOLDS_MESSAGES) != 0;
next.open(Folder.READ_ONLY);
lastFolder = next;
log.info("Opened folder : " + fullName);
log.info("Opened folder : {}", fullName);
}
if (recurse && ((next.getType() & Folder.HOLDS_FOLDERS) != 0)) {
Folder[] children = next.list();
log.info("Added its children to list : ");
for (int i = children.length - 1; i >= 0; i--) {
folders.add(0, children[i]);
log.info("child name : " + children[i].getFullName());
if (log.isInfoEnabled()) {
log.info("child name : {}", children[i].getFullName());
}
}
if (children.length == 0) log.info("NO children : ");
}
@ -530,7 +532,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
} while (!hasMessages);
return next;
} catch (Exception e) {
log.warn("Failed to read folders due to: "+e);
log.warn("Failed to read folders due to: {}", e);
// throw new
// DataImportHandlerException(DataImportHandlerException.SEVERE,
// "Folder open failed", e);
@ -568,13 +570,13 @@ public class MailEntityProcessor extends EntityProcessorBase {
try {
Folder[] ufldrs = mailbox.getUserNamespaces(null);
if (ufldrs != null) {
log.info("Found " + ufldrs.length + " user namespace folders");
log.info("Found {} user namespace folders", ufldrs.length);
for (Folder ufldr : ufldrs)
folders.add(ufldr);
}
} catch (MessagingException me) {
log.warn("Messaging exception retrieving user namespaces: "
+ me.getMessage());
log.warn("Messaging exception retrieving user namespaces: {}"
, me.getMessage());
}
}
@ -582,13 +584,13 @@ public class MailEntityProcessor extends EntityProcessorBase {
try {
Folder[] sfldrs = mailbox.getSharedNamespaces();
if (sfldrs != null) {
log.info("Found " + sfldrs.length + " shared namespace folders");
log.info("Found {} shared namespace folders", sfldrs.length);
for (Folder sfldr : sfldrs)
folders.add(sfldr);
}
} catch (MessagingException me) {
log.warn("Messaging exception retrieving shared namespaces: "
+ me.getMessage());
log.warn("Messaging exception retrieving shared namespaces: {}"
, me.getMessage());
}
}
@ -620,14 +622,16 @@ public class MailEntityProcessor extends EntityProcessorBase {
this.batchSize = batchSize;
SearchTerm st = getSearchTerm();
log.info("SearchTerm=" + st);
log.info("SearchTerm={}", st);
if (st != null || folder instanceof GmailFolder) {
doBatching = false;
// Searching can still take a while even though we're only pulling
// envelopes; unless you're using gmail server-side filter, which is
// fast
log.info("Searching folder " + folder.getName() + " for messages");
if (log.isInfoEnabled()) {
log.info("Searching folder {} for messages", folder.getName());
}
final RTimer searchTimer = new RTimer();
// If using GMail, speed up the envelope processing by doing a
@ -642,12 +646,14 @@ public class MailEntityProcessor extends EntityProcessorBase {
if (folder instanceof GmailFolder && fetchMailsSince != null) {
String afterCrit = "after:" + afterFmt.format(fetchMailsSince);
log.info("Added server-side gmail filter: " + afterCrit);
log.info("Added server-side gmail filter: {}", afterCrit);
Message[] afterMessages = folder.search(new GmailRawSearchTerm(
afterCrit));
log.info("GMail server-side filter found " + afterMessages.length
+ " messages received " + afterCrit + " in folder " + folder.getName());
if (log.isInfoEnabled()) {
log.info("GMail server-side filter found {} messages received {} in folder "
, afterMessages.length, afterCrit, folder.getName());
}
// now pass in the server-side filtered messages to the local filter
messagesInCurBatch = folder.search((st != null ? st : this), afterMessages);
@ -657,11 +663,13 @@ public class MailEntityProcessor extends EntityProcessorBase {
totalInFolder = messagesInCurBatch.length;
folder.fetch(messagesInCurBatch, fp);
current = 0;
log.info("Total messages : " + totalInFolder);
log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime());
if (log.isInfoEnabled()) {
log.info("Total messages : {}", totalInFolder);
log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime()); // logOk
}
} else {
totalInFolder = folder.getMessageCount();
log.info("Total messages : " + totalInFolder);
log.info("Total messages : {}", totalInFolder);
getNextBatch(batchSize, folder);
}
} catch (MessagingException e) {
@ -685,8 +693,8 @@ public class MailEntityProcessor extends EntityProcessorBase {
folder.fetch(messagesInCurBatch, fp);
current = 0;
currentBatch++;
log.info("Current Batch : " + currentBatch);
log.info("Messages in this batch : " + messagesInCurBatch.length);
log.info("Current Batch : {}", currentBatch);
log.info("Messages in this batch : {}", messagesInCurBatch.length);
}
public boolean hasNext() {
@ -741,8 +749,10 @@ public class MailEntityProcessor extends EntityProcessorBase {
@SuppressWarnings("serial")
public SearchTerm getCustomSearch(final Folder folder) {
log.info("Building mail filter for messages in " + folder.getName()
+ " that occur after " + sinceDateParser.format(since));
if (log.isInfoEnabled()) {
log.info("Building mail filter for messages in {} that occur after {}"
, folder.getName(), sinceDateParser.format(since));
}
return new DateTerm(ComparisonTerm.GE, since) {
private int matched = 0;
private int seen = 0;
@ -761,16 +771,20 @@ public class MailEntityProcessor extends EntityProcessorBase {
} else {
String msgDateStr = (msgDate != null) ? sinceDateParser.format(msgDate) : "null";
String sinceDateStr = (since != null) ? sinceDateParser.format(since) : "null";
log.debug("Message " + msg.getSubject() + " was received at [" + msgDateStr
+ "], since filter is [" + sinceDateStr + "]");
if (log.isDebugEnabled()) {
log.debug("Message {} was received at [{}], since filter is [{}]"
, msg.getSubject(), msgDateStr, sinceDateStr);
}
}
} catch (MessagingException e) {
log.warn("Failed to process message due to: "+e, e);
log.warn("Failed to process message due to: {}", e, e);
}
if (seen % 100 == 0) {
log.info("Matched " + matched + " of " + seen + " messages since: "
+ sinceDateParser.format(since));
if (log.isInfoEnabled()) {
log.info("Matched {} of {} messages since: {}"
, matched, seen, sinceDateParser.format(since));
}
}
return isMatch;

View File

@ -61,14 +61,14 @@ public class BinURLDataSource extends DataSource<InputStream>{
try {
connectionTimeout = Integer.parseInt(cTimeout);
} catch (NumberFormatException e) {
log.warn("Invalid connection timeout: " + cTimeout);
log.warn("Invalid connection timeout: {}", cTimeout);
}
}
if (rTimeout != null) {
try {
readTimeout = Integer.parseInt(rTimeout);
} catch (NumberFormatException e) {
log.warn("Invalid read timeout: " + rTimeout);
log.warn("Invalid read timeout: {}", rTimeout);
}
}
}
@ -79,7 +79,7 @@ public class BinURLDataSource extends DataSource<InputStream>{
try {
if (URIMETHOD.matcher(query).find()) url = new URL(query);
else url = new URL(baseUrl + query);
log.debug("Accessing URL: " + url.toString());
log.debug("Accessing URL: {}", url);
URLConnection conn = url.openConnection();
conn.setConnectTimeout(connectionTimeout);
conn.setReadTimeout(readTimeout);

View File

@ -266,7 +266,7 @@ public class DataImportHandler extends RequestHandlerBase implements
try {
return super.upload(document);
} catch (RuntimeException e) {
log.error("Exception while adding: " + document, e);
log.error("Exception while adding: {}", document, e);
return false;
}
}

View File

@ -125,7 +125,7 @@ public class DataImporter {
} else if(dataconfigFile!=null) {
is = new InputSource(core.getResourceLoader().openResource(dataconfigFile));
is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile));
log.info("Loading DIH Configuration: " + dataconfigFile);
log.info("Loading DIH Configuration: {}", dataconfigFile);
}
if(is!=null) {
config = loadDataConfig(is);
@ -148,7 +148,7 @@ public class DataImporter {
for (int i = 0; i < dsConfig.size(); i++) {
props.put(dsConfig.getName(i), dsConfig.getVal(i).toString());
}
log.info("Adding properties to datasource: " + props);
log.info("Adding properties to datasource: {}", props);
dsProps.put((String) dsConfig.get("name"), props);
}
position++;

View File

@ -265,7 +265,9 @@ public class DocBuilder {
statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get());
statusMessages.put("Time taken", getTimeElapsedSince(startTime.get()));
log.info("Time taken = " + getTimeElapsedSince(startTime.get()));
if (log.isInfoEnabled()) {
log.info("Time taken = {}", getTimeElapsedSince(startTime.get()));
}
} catch(Exception e)
{
throw new RuntimeException(e);
@ -385,7 +387,7 @@ public class DocBuilder {
key = map.get(keyName);
}
if(key == null) {
log.warn("no key was available for deleted pk query. keyName = " + keyName);
log.warn("no key was available for deleted pk query. keyName = {}", keyName);
continue;
}
writer.deleteDoc(key);
@ -483,7 +485,7 @@ public class DocBuilder {
if (seenDocCount <= reqParams.getStart())
continue;
if (seenDocCount > reqParams.getStart() + reqParams.getRows()) {
log.info("Indexing stopped at docCount = " + importStatistics.docCount);
log.info("Indexing stopped at docCount = {}", importStatistics.docCount);
break;
}
}
@ -759,9 +761,11 @@ public class DocBuilder {
"deltaQuery has no column to resolve to declared primary key pk='%s'",
pk));
}
log.info(String.format(Locale.ROOT,
"Resolving deltaQuery column '%s' to match entity's declared pk '%s'",
resolvedPk, pk));
if (log.isInfoEnabled()) {
log.info(String.format(Locale.ROOT,
"Resolving deltaQuery column '%s' to match entity's declared pk '%s'",
resolvedPk, pk));
}
return resolvedPk;
}
@ -796,7 +800,9 @@ public class DocBuilder {
// identifying the modified rows for this entity
Map<String, Map<String, Object>> deltaSet = new HashMap<>();
log.info("Running ModifiedRowKey() for Entity: " + epw.getEntity().getName());
if (log.isInfoEnabled()) {
log.info("Running ModifiedRowKey() for Entity: {}", epw.getEntity().getName());
}
//get the modified rows in this entity
String pk = epw.getEntity().getPk();
while (true) {
@ -844,8 +850,10 @@ public class DocBuilder {
return new HashSet();
}
log.info("Completed ModifiedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deltaSet.size());
log.info("Completed DeletedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deletedSet.size());
if (log.isInfoEnabled()) {
log.info("Completed ModifiedRowKey for Entity: {} rows obtained: {}", epw.getEntity().getName(), deltaSet.size());
log.info("Completed DeletedRowKey for Entity: {} rows obtained : {}", epw.getEntity().getName(), deletedSet.size()); // logOk
}
myModifiedPks.addAll(deltaSet.values());
Set<Map<String, Object>> parentKeyList = new HashSet<>();
@ -870,7 +878,9 @@ public class DocBuilder {
return new HashSet();
}
}
log.info("Completed parentDeltaQuery for Entity: " + epw.getEntity().getName());
if (log.isInfoEnabled()) {
log.info("Completed parentDeltaQuery for Entity: {}", epw.getEntity().getName());
}
if (epw.getEntity().isDocRoot())
deletedRows.addAll(deletedSet);

View File

@ -124,7 +124,7 @@ public class EntityProcessorWrapper extends EntityProcessor {
log.error(msg);
wrapAndThrow(SEVERE, nsme,msg);
} catch (Exception e) {
log.error("Unable to load Transformer: " + aTransArr, e);
log.error("Unable to load Transformer: {}", aTransArr, e);
wrapAndThrow(SEVERE, e,"Unable to load Transformer: " + trans);
}
}
@ -172,7 +172,7 @@ public class EntityProcessorWrapper extends EntityProcessor {
try {
return meth.invoke(o, aRow);
} catch (Exception e) {
log.warn("method invocation failed on transformer : " + trans, e);
log.warn("method invocation failed on transformer : {}", trans, e);
throw new DataImportHandlerException(WARN, e);
}
}

View File

@ -102,14 +102,14 @@ public class FileDataSource extends DataSource<Reader> {
File basePathFile;
if (basePath == null) {
basePathFile = new File(".").getAbsoluteFile();
log.warn("FileDataSource.basePath is empty. " +
"Resolving to: " + basePathFile.getAbsolutePath());
log.warn("FileDataSource.basePath is empty. Resolving to: {}"
, basePathFile.getAbsolutePath());
} else {
basePathFile = new File(basePath);
if (!basePathFile.isAbsolute()) {
basePathFile = basePathFile.getAbsoluteFile();
log.warn("FileDataSource.basePath is not absolute. Resolving to: "
+ basePathFile.getAbsolutePath());
log.warn("FileDataSource.basePath is not absolute. Resolving to: {}"
, basePathFile.getAbsolutePath());
}
}
@ -117,7 +117,9 @@ public class FileDataSource extends DataSource<Reader> {
}
if (file.isFile() && file.canRead()) {
log.debug("Accessing File: " + file.getAbsolutePath());
if (log.isDebugEnabled()) {
log.debug("Accessing File: {}", file.getAbsolutePath());
}
return file;
} else {
throw new FileNotFoundException("Could not find file: " + query +

View File

@ -87,7 +87,7 @@ public class JdbcDataSource extends
if (batchSize == -1)
batchSize = Integer.MIN_VALUE;
} catch (NumberFormatException e) {
log.warn("Invalid batch size: " + bsz);
log.warn("Invalid batch size: {}", bsz);
}
}
@ -172,9 +172,10 @@ public class JdbcDataSource extends
return factory = new Callable<Connection>() {
@Override
public Connection call() throws Exception {
log.info("Creating a connection for entity "
+ context.getEntityAttribute(DataImporter.NAME) + " with URL: "
+ url);
if (log.isInfoEnabled()) {
log.info("Creating a connection for entity {} with URL: {}"
, context.getEntityAttribute(DataImporter.NAME), url);
}
long start = System.nanoTime();
Connection c = null;
@ -205,8 +206,8 @@ public class JdbcDataSource extends
throw new DataImportHandlerException(SEVERE, "Exception initializing SQL connection", e);
}
}
log.info("Time taken for getConnection(): "
+ TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
log.info("Time taken for getConnection(): {}"
, TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
return c;
}
@ -316,11 +317,11 @@ public class JdbcDataSource extends
try {
Connection c = getConnection();
stmt = createStatement(c, batchSize, maxRows);
log.debug("Executing SQL: " + query);
log.debug("Executing SQL: {}", query);
long start = System.nanoTime();
resultSet = executeStatement(stmt, query);
log.trace("Time taken for sql :"
+ TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
log.trace("Time taken for sql : {}"
, TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
setColNames(resultSet);
} catch (Exception e) {
close();

View File

@ -165,7 +165,7 @@ public class RegexTransformer extends Transformer {
}
}
} catch (Exception e) {
log.warn("Parsing failed for field : " + columnName, e);
log.warn("Parsing failed for field : {}", columnName, e);
}
}
return l == null ? map: l;

View File

@ -214,7 +214,7 @@ public class SimplePropertiesWriter extends DIHProperties {
existingProps.putAll(newProps);
propOutput = new OutputStreamWriter(new FileOutputStream(getPersistFile()), StandardCharsets.UTF_8);
existingProps.store(propOutput, null);
log.info("Wrote last indexed time to " + filename);
log.info("Wrote last indexed time to {}", filename);
} catch (Exception e) {
throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
"Unable to persist Index Start Time", e);
@ -235,9 +235,9 @@ public class SimplePropertiesWriter extends DIHProperties {
filePath += filename;
propInput = new FileInputStream(filePath);
props.load(new InputStreamReader(propInput, StandardCharsets.UTF_8));
log.info("Read " + filename);
log.info("Read {}", filename);
} catch (Exception e) {
log.warn("Unable to read: " + filename);
log.warn("Unable to read: {}", filename);
} finally {
IOUtils.closeWhileHandlingException(propInput);
}

View File

@ -79,7 +79,7 @@ public class SolrWriter extends DIHWriterBase implements DIHWriter {
command.commitWithin = commitWithin;
processor.processAdd(command);
} catch (Exception e) {
log.warn("Error creating document : " + d, e);
log.warn("Error creating document : {}", d, e);
return false;
}
@ -89,24 +89,24 @@ public class SolrWriter extends DIHWriterBase implements DIHWriter {
@Override
public void deleteDoc(Object id) {
try {
log.info("Deleting document: " + id);
log.info("Deleting document: {}", id);
DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req);
delCmd.setId(id.toString());
processor.processDelete(delCmd);
} catch (IOException e) {
log.error("Exception while deleteing: " + id, e);
log.error("Exception while deleteing: {}", id, e);
}
}
@Override
public void deleteByQuery(String query) {
try {
log.info("Deleting documents from Solr with query: " + query);
log.info("Deleting documents from Solr with query: {}", query);
DeleteUpdateCommand delCmd = new DeleteUpdateCommand(req);
delCmd.query = query;
processor.processDelete(delCmd);
} catch (IOException e) {
log.error("Exception while deleting by query: " + query, e);
log.error("Exception while deleting by query: {}", query, e);
}
}

View File

@ -61,7 +61,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
} catch (DataImportHandlerException e) {
throw e;
} catch (Exception e) {
log.error( "The query failed '" + q + "'", e);
log.error( "The query failed '{}'", q, e);
throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e);
}
}
@ -103,8 +103,10 @@ public class SqlEntityProcessor extends EntityProcessorBase {
String parentDeltaQuery = context.getEntityAttribute(PARENT_DELTA_QUERY);
if (parentDeltaQuery == null)
return null;
log.info("Running parentDeltaQuery for Entity: "
+ context.getEntityAttribute("name"));
if (log.isInfoEnabled()) {
log.info("Running parentDeltaQuery for Entity: {}"
, context.getEntityAttribute("name"));
}
initQuery(context.replaceTokens(parentDeltaQuery));
}
return getNext();
@ -119,7 +121,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
String deltaImportQuery = context.getEntityAttribute(DELTA_IMPORT_QUERY);
if(deltaImportQuery != null) return deltaImportQuery;
}
log.warn("'deltaImportQuery' attribute is not specified for entity : "+ entityName);
log.warn("'deltaImportQuery' attribute is not specified for entity : {}", entityName);
return getDeltaImportQuery(queryString);
}

View File

@ -76,8 +76,8 @@ public class TemplateTransformer extends Transformer {
}
for (String v : variables) {
if (resolver.resolve(v) == null) {
log.warn("Unable to resolve variable: " + v
+ " while parsing expression: " + expr);
log.warn("Unable to resolve variable: {} while parsing expression: {}"
,v , expr);
resolvable = false;
}
}

View File

@ -72,14 +72,14 @@ public class URLDataSource extends DataSource<Reader> {
try {
connectionTimeout = Integer.parseInt(cTimeout);
} catch (NumberFormatException e) {
log.warn("Invalid connection timeout: " + cTimeout);
log.warn("Invalid connection timeout: {}", cTimeout);
}
}
if (rTimeout != null) {
try {
readTimeout = Integer.parseInt(rTimeout);
} catch (NumberFormatException e) {
log.warn("Invalid read timeout: " + rTimeout);
log.warn("Invalid read timeout: {}", rTimeout);
}
}
}
@ -91,7 +91,7 @@ public class URLDataSource extends DataSource<Reader> {
if (URIMETHOD.matcher(query).find()) url = new URL(query);
else url = new URL(baseUrl + query);
log.debug("Accessing URL: " + url.toString());
log.debug("Accessing URL: {}", url);
URLConnection conn = url.openConnection();
conn.setConnectTimeout(connectionTimeout);

View File

@ -136,8 +136,9 @@ public class XPathEntityProcessor extends EntityProcessorBase {
// some XML parsers are broken and don't close the byte stream (but they should according to spec)
IOUtils.closeQuietly(xsltSource.getInputStream());
}
log.info("Using xslTransformer: "
+ xslTransformer.getClass().getName());
if (log.isInfoEnabled()) {
log.info("Using xslTransformer: {}", xslTransformer.getClass().getName());
}
} catch (Exception e) {
throw new DataImportHandlerException(SEVERE,
"Error initializing XSL ", e);
@ -293,10 +294,12 @@ public class XPathEntityProcessor extends EntityProcessorBase {
if (ABORT.equals(onError)) {
wrapAndThrow(SEVERE, e);
} else if (SKIP.equals(onError)) {
if (log.isDebugEnabled()) log.debug("Skipping url : " + s, e);
if (log.isDebugEnabled()) {
log.debug("Skipping url : {}", s, e);
}
wrapAndThrow(DataImportHandlerException.SKIP, e);
} else {
log.warn("Failed for url : " + s, e);
log.warn("Failed for url : {}", s, e);
rowIterator = Collections.EMPTY_LIST.iterator();
return;
}
@ -313,7 +316,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
} else if (SKIP.equals(onError)) {
wrapAndThrow(DataImportHandlerException.SKIP, e);
} else {
log.warn("Failed for url : " + s, e);
log.warn("Failed for url : {}", s, e);
rowIterator = Collections.EMPTY_LIST.iterator();
return;
}

View File

@ -645,7 +645,7 @@ public class XPathRecordReader {
} catch (IllegalArgumentException ex) {
// Other implementations will likely throw this exception since "reuse-instance"
// is implementation specific.
log.debug("Unable to set the 'reuse-instance' property for the input chain: " + factory);
log.debug("Unable to set the 'reuse-instance' property for the input chain: {}", factory);
}
}

View File

@ -74,7 +74,7 @@ public class ZKPropertiesWriter extends SimplePropertiesWriter {
zkClient.setData(path, bytes, false);
} catch (Exception e) {
SolrZkClient.checkInterrupted(e);
log.warn("Could not persist properties to " + path + " :" + e.getClass(), e);
log.warn("Could not persist properties to {} : {}", path, e.getClass(), e);
}
}
@ -88,7 +88,7 @@ public class ZKPropertiesWriter extends SimplePropertiesWriter {
}
} catch (Exception e) {
SolrZkClient.checkInterrupted(e);
log.warn("Could not read DIH properties from " + path + " :" + e.getClass(), e);
log.warn("Could not read DIH properties from {} : {}", path, e.getClass(), e);
}
return propertiesToMap(props);
}

View File

@ -106,7 +106,9 @@ public class DIHConfiguration {
SchemaField sf = entry.getValue();
if (!fields.containsKey(sf.getName())) {
if (sf.isRequired()) {
log.info(sf.getName() + " is a required field in SolrSchema . But not found in DataConfig");
if (log.isInfoEnabled()) {
log.info("{} is a required field in SolrSchema . But not found in DataConfig", sf.getName());
}
}
}
}
@ -114,7 +116,9 @@ public class DIHConfiguration {
EntityField fld = entry.getValue();
SchemaField field = getSchemaField(fld.getName());
if (field == null && !isSpecialCommand(fld.getName())) {
log.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
if (log.isInfoEnabled()) {
log.info("The field :{} present in DataConfig does not have a counterpart in Solr Schema", fld.getName());
}
}
}
}

View File

@ -112,7 +112,7 @@ public abstract class AbstractSqlEntityProcessorTestCase extends
for(Map.Entry<String,Object> entry : props.entrySet()) {
sb.append(" > key=" + entry.getKey() + " / value=" + entry.getValue() + "\n");
}
log.debug(sb.toString());
log.debug("{}", sb);
}
}
@ -465,9 +465,10 @@ public abstract class AbstractSqlEntityProcessorTestCase extends
// One second in the future ensures a change time after the last import (DIH
// uses second precision only)
Timestamp theTime = new Timestamp(System.currentTimeMillis() + 1000);
log.debug("PEOPLE UPDATE USING TIMESTAMP: "
+ new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT)
.format(theTime));
if (log.isDebugEnabled()) {
log.debug("PEOPLE UPDATE USING TIMESTAMP: {}"
, new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime));
}
try {
conn = newConnection();
change = conn
@ -537,9 +538,10 @@ public abstract class AbstractSqlEntityProcessorTestCase extends
// One second in the future ensures a change time after the last import (DIH
// uses second precision only)
Timestamp theTime = new Timestamp(System.currentTimeMillis() + 1000);
log.debug("COUNTRY UPDATE USING TIMESTAMP: "
+ new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT)
.format(theTime));
if (log.isDebugEnabled()) {
log.debug("COUNTRY UPDATE USING TIMESTAMP: {}"
, new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime));
}
try {
conn = newConnection();
change = conn
@ -720,9 +722,10 @@ public abstract class AbstractSqlEntityProcessorTestCase extends
s.executeUpdate("create table countries(code varchar(3) not null primary key, country_name varchar(50), deleted char(1) default 'N', last_modified timestamp not null)");
s.executeUpdate("create table people(id int not null primary key, name varchar(50), country_code char(2), deleted char(1) default 'N', last_modified timestamp not null)");
s.executeUpdate("create table people_sports(id int not null primary key, person_id int, sport_name varchar(50), deleted char(1) default 'N', last_modified timestamp not null)");
log.debug("INSERTING DB DATA USING TIMESTAMP: "
+ new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT)
.format(theTime));
if (log.isDebugEnabled()) {
log.debug("INSERTING DB DATA USING TIMESTAMP: {}",
new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime));
}
ps = conn
.prepareStatement("insert into countries (code, country_name, last_modified) values (?,?,?)");
for (String[] country : countries) {
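Placeholders defer formatting, but they cannot defer work done while building the argument list itself: in the hunks above, the SimpleDateFormat is constructed and format() runs before log.debug() is even entered, which is why these call sites also gain an if (log.isDebugEnabled()) guard. A minimal sketch of the distinction (timestamp and pattern are illustrative):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardedDebugSketch {
  private static final Logger log = LoggerFactory.getLogger(GuardedDebugSketch.class);

  public static void main(String[] args) {
    Date theTime = new Date();
    // Even with a {} placeholder, the argument expression is evaluated
    // unconditionally: format() runs whether or not DEBUG is enabled.
    log.debug("UPDATE USING TIMESTAMP: {}",
        new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime));
    // The guard skips the argument construction entirely when DEBUG is off.
    if (log.isDebugEnabled()) {
      log.debug("UPDATE USING TIMESTAMP: {}",
          new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(theTime));
    }
  }
}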

View File

@ -44,7 +44,7 @@ public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase {
List<ControlData> testData = extractDataByKeyLookup(cache, fieldNames);
compareData(data, testData);
} catch (Exception e) {
log.warn("Exception thrown: " + e.toString());
log.warn("Exception thrown: {}", e);
Assert.fail();
} finally {
try {
@ -64,7 +64,7 @@ public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase {
List<ControlData> testData = extractDataInKeyOrder(cache, fieldNames);
compareData(data, testData);
} catch (Exception e) {
log.warn("Exception thrown: " + e.toString());
log.warn("Exception thrown: {}", e);
Assert.fail();
} finally {
try {
@ -180,7 +180,7 @@ public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase {
compareData(newControlData, testData);
} catch (Exception e) {
log.warn("Exception thrown: " + e.toString());
log.warn("Exception thrown: {}", e);
Assert.fail();
} finally {
try {

View File

@ -49,7 +49,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC
logPropertiesFile();
changeStuff();
int c = calculateDatabaseCalls();
log.debug("testSingleEntity delta-import (" + c + " database calls expected)...");
log.debug("testSingleEntity delta-import ({} database calls expected)...", c);
singleEntity(c);
validateChanges();
}
@ -76,7 +76,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC
changeStuff();
int c = calculateDatabaseCalls();
simpleTransform(c);
log.debug("testWithSimpleTransformer delta-import (" + c + " database calls expected)...");
log.debug("testWithSimpleTransformer delta-import ({} database calls expected)...", c);
validateChanges();
}
@Test
@ -86,7 +86,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC
logPropertiesFile();
changeStuff();
int c = calculateDatabaseCalls();
log.debug("testWithComplexTransformer delta-import (" + c + " database calls expected)...");
log.debug("testWithComplexTransformer delta-import ({} database calls expected)...", c);
complexTransform(c, personChanges.deletedKeys.length);
validateChanges();
}
@ -94,7 +94,7 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC
public void testChildEntities() throws Exception {
log.debug("testChildEntities full-import...");
useParentDeltaQueryParam = random().nextBoolean();
log.debug("using parent delta? " + useParentDeltaQueryParam);
log.debug("using parent delta? {}", useParentDeltaQueryParam);
withChildEntities(false, true);
logPropertiesFile();
changeStuff();
@ -168,13 +168,13 @@ public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestC
sb.append(s).append(" ");
}
sb.append(" }");
log.debug(sb.toString());
log.debug("{}", sb);
}
}
private void personChangesLog()
{
if(personChanges!=null) {
log.debug("person changes { " + personChanges.toString() + " } ");
log.debug("person changes [ {} ] ", personChanges);
}
}
@Override

View File

@ -215,7 +215,7 @@ public class ExtractingDocumentLoader extends ContentStreamLoader {
if(pwMapFile != null && pwMapFile.length() > 0) {
InputStream is = req.getCore().getResourceLoader().openResource(pwMapFile);
if(is != null) {
log.debug("Password file supplied: "+pwMapFile);
log.debug("Password file supplied: {}", pwMapFile);
epp.parse(is);
}
}
@ -223,13 +223,13 @@ public class ExtractingDocumentLoader extends ContentStreamLoader {
String resourcePassword = params.get(ExtractingParams.RESOURCE_PASSWORD);
if(resourcePassword != null) {
epp.setExplicitPassword(resourcePassword);
log.debug("Literal password supplied for file "+resourceName);
log.debug("Literal password supplied for file {}", resourceName);
}
parser.parse(inputStream, parsingHandler, metadata, context);
} catch (TikaException e) {
if(ignoreTikaException)
log.warn(new StringBuilder("skip extracting text due to ").append(e.getLocalizedMessage())
.append(". metadata=").append(metadata.toString()).toString());
.append(". metadata=").append(metadata.toString()).toString()); // logOk
else
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
}
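The // logOk marker appended above appears to be the escape hatch for call sites the new validateLogCalls check would otherwise flag but that are acceptable as written: here the StringBuilder is only constructed on the warn path inside the catch, so nothing is wasted. Assuming the marker simply tells the validator to skip the line, a suppressed call would look like:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogOkSketch {
  private static final Logger log = LoggerFactory.getLogger(LogOkSketch.class);

  public static void main(String[] args) {
    Exception e = new RuntimeException("parse failure");
    // Concatenation kept deliberately; the trailing marker (assumed
    // semantics) asks the precommit check to skip this line.
    log.warn("skip extracting text due to " + e.getLocalizedMessage()); // logOk
  }
}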

View File

@ -88,7 +88,7 @@ public class RegexRulesPasswordProvider implements PasswordProvider {
continue;
int sep = line.indexOf("=");
if(sep <= 0) {
log.warn("Wrong format of password line "+linenum);
log.warn("Wrong format of password line {}", linenum);
continue;
}
String pass = line.substring(sep+1).trim();
@ -97,7 +97,7 @@ public class RegexRulesPasswordProvider implements PasswordProvider {
Pattern pattern = Pattern.compile(regex);
rules.put(pattern, pass);
} catch(PatternSyntaxException pse) {
log.warn("Key of line "+linenum+" was not a valid regex pattern", pse);
log.warn("Key of line {} was not a valid regex pattern{}", linenum, pse);
continue;
}
}

View File

@ -149,7 +149,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
if(keyVal.length == 2) {
lcMap.put(keyVal[0], keyVal[1]);
} else {
log.error("Unsupported format for langid.lcmap: "+mapping+". Skipping this mapping.");
log.error("Unsupported format for langid.lcmap: {}. Skipping this mapping.", mapping);
}
}
}
@ -162,7 +162,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
if(keyVal.length == 2) {
mapLcMap.put(keyVal[0], keyVal[1]);
} else {
log.error("Unsupported format for langid.map.lcmap: "+mapping+". Skipping this mapping.");
log.error("Unsupported format for langid.map.lcmap: {}. Skipping this mapping.", mapping);
}
}
}
@ -175,13 +175,15 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
if (maxFieldValueChars > maxTotalChars) {
if (maxTotalChars == MAX_TOTAL_CHARS_DEFAULT) {
// If the user specified only maxFieldValueChars, make maxTotalChars the same as it
log.warn(MAX_FIELD_VALUE_CHARS + " (" + maxFieldValueChars + ") is less than " + MAX_TOTAL_CHARS + " ("
+ maxTotalChars + "). Setting " + MAX_TOTAL_CHARS + " to " + maxFieldValueChars + ".");
log.warn("{} ({}) is less than {} ({}). Setting {} to {}."
, MAX_FIELD_VALUE_CHARS, maxFieldValueChars, MAX_TOTAL_CHARS
, maxTotalChars, MAX_TOTAL_CHARS, maxFieldValueChars);
maxTotalChars = maxFieldValueChars;
} else {
// If the user specified maxTotalChars, make maxFieldValueChars the same as it
log.warn(MAX_FIELD_VALUE_CHARS + " (" + maxFieldValueChars + ") is less than " + MAX_TOTAL_CHARS + " ("
+ maxTotalChars + "). Setting " + MAX_FIELD_VALUE_CHARS + " to " + maxTotalChars + ".");
log.warn("{} ({}) is less than {} ({}). Setting {} to {}."
, MAX_FIELD_VALUE_CHARS, maxFieldValueChars, MAX_TOTAL_CHARS
, maxTotalChars, MAX_FIELD_VALUE_CHARS, maxTotalChars );
maxFieldValueChars = maxTotalChars;
}
}
@ -219,10 +221,14 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
List<DetectedLanguage> languagelist = detectLanguage(doc);
docLang = resolveLanguage(languagelist, fallbackLang);
docLangs.add(docLang);
log.debug("Detected main document language from fields "+ Arrays.toString(inputFields) +": "+docLang);
if (log.isDebugEnabled()) {
log.debug("Detected main document language from fields {}: {}", Arrays.toString(inputFields), docLang);
}
if(doc.containsKey(langField) && overwrite) {
log.debug("Overwritten old value "+doc.getFieldValue(langField));
if (log.isDebugEnabled()) {
log.debug("Overwritten old value {}", doc.getFieldValue(langField));
}
}
if(langField != null && langField.length() != 0) {
doc.setField(langField, docLang);
@ -231,7 +237,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
// langField is set, we sanity check it against whitelist and fallback
docLang = resolveLanguage(doc.getFieldValue(langField).toString(), fallbackLang);
docLangs.add(docLang);
log.debug("Field "+langField+" already contained value "+docLang+", not overwriting.");
log.debug("Field {} already contained value {}, not overwriting.", langField, docLang);
}
if(enableMapping) {
@ -242,15 +248,17 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
List<DetectedLanguage> languagelist = detectLanguage(solrDocReader(doc, new String[]{fieldName}));
fieldLang = resolveLanguage(languagelist, docLang);
docLangs.add(fieldLang);
log.debug("Mapping field "+fieldName+" using individually detected language "+fieldLang);
log.debug("Mapping field {} using individually detected language {}", fieldName, fieldLang);
} else {
fieldLang = docLang;
log.debug("Mapping field "+fieldName+" using document global language "+fieldLang);
log.debug("Mapping field {} using document global language {}", fieldName, fieldLang);
}
String mappedOutputField = getMappedField(fieldName, fieldLang);
if (mappedOutputField != null) {
log.debug("Mapping field {} to {}", doc.getFieldValue(docIdField), fieldLang);
if (log.isDebugEnabled()) {
log.debug("Mapping field {} to {}", doc.getFieldValue(docIdField), fieldLang);
}
SolrInputField inField = doc.getField(fieldName);
doc.setField(mappedOutputField, inField.getValue());
if(!mapKeepOrig) {
@ -282,12 +290,12 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
for(String field : fallbackFields) {
if(doc.containsKey(field)) {
lang = (String) doc.getFieldValue(field);
log.debug("Language fallback to field "+field);
log.debug("Language fallback to field {}", field);
break;
}
}
if(lang == null) {
log.debug("Language fallback to value "+fallbackValue);
log.debug("Language fallback to value {}", fallbackValue);
lang = fallbackValue;
}
return lang;
@ -337,7 +345,9 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
DetectedLanguage lang = languages.get(0);
String normalizedLang = normalizeLangCode(lang.getLangCode());
if(langWhitelist.isEmpty() || langWhitelist.contains(normalizedLang)) {
log.debug("Language detected {} with certainty {}", normalizedLang, lang.getCertainty());
if (log.isDebugEnabled()) {
log.debug("Language detected {} with certainty {}", normalizedLang, lang.getCertainty());
}
if(lang.getCertainty() >= threshold) {
langStr = normalizedLang;
} else {
@ -345,7 +355,9 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
langStr = fallbackLang;
}
} else {
log.debug("Detected a language not in whitelist ({}), using fallback {}", lang.getLangCode(), fallbackLang);
if (log.isDebugEnabled()) {
log.debug("Detected a language not in whitelist ({}), using fallback {}", lang.getLangCode(), fallbackLang);
}
langStr = fallbackLang;
}
}
@ -366,7 +378,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
protected String normalizeLangCode(String langCode) {
if (lcMap.containsKey(langCode)) {
String lc = lcMap.get(langCode);
log.debug("Doing langcode normalization mapping from "+langCode+" to "+lc);
log.debug("Doing langcode normalization mapping from {} to {}", langCode, lc);
return lc;
}
return langCode;
@ -389,7 +401,7 @@ public abstract class LanguageIdentifierUpdateProcessor extends UpdateRequestPro
log.warn("Unsuccessful field name mapping from {} to {}, field does not exist and enforceSchema=true; skipping mapping.", currentField, newFieldName);
return null;
} else {
log.debug("Doing mapping from "+currentField+" with language "+language+" to field "+newFieldName);
log.debug("Doing mapping from {} with language {} to field {}", currentField, language, newFieldName);
}
return newFieldName;
}

View File

@ -58,7 +58,10 @@ public class TikaLanguageIdentifierUpdateProcessor extends LanguageIdentifierUpd
certainty = 0d;
DetectedLanguage language = new DetectedLanguage(identifier.getLanguage(), certainty);
languages.add(language);
log.debug("Language detected as "+language+" with a certainty of "+language.getCertainty()+" (Tika distance="+identifier.toString()+")");
if (log.isDebugEnabled()) {
log.debug("Language detected as {} with a certainty of {} (Tika distance={})"
, language, language.getCertainty(), identifier);
}
} else {
log.debug("No input text to detect language from, returning empty list");
}

View File

@ -141,7 +141,9 @@ public class ManagedModelStore extends ManagedResource implements ManagedResourc
public synchronized void addModel(LTRScoringModel ltrScoringModel) throws ModelException {
try {
log.info("adding model {}", ltrScoringModel.getName());
if (log.isInfoEnabled()) {
log.info("adding model {}", ltrScoringModel.getName());
}
store.addModel(ltrScoringModel);
} catch (final ModelException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);

View File

@ -248,8 +248,10 @@ public class TestLTRReRankingPipeline extends SolrTestCase {
hits = new TopDocs(hits.totalHits, slice);
hits = rescorer.rescore(searcher, hits, topN);
for (int i = topN - 1, j = 0; i >= 0; i--, j++) {
log.info("doc {} in pos {}", searcher.doc(hits.scoreDocs[j].doc)
.get("id"), j);
if (log.isInfoEnabled()) {
log.info("doc {} in pos {}", searcher.doc(hits.scoreDocs[j].doc)
.get("id"), j);
}
assertEquals(i,
Integer.parseInt(searcher.doc(hits.scoreDocs[j].doc).get("id")));

View File

@ -159,13 +159,15 @@ public class TestRerankBase extends RestTestBase {
}
if (fstore.exists()) {
log.info("remove feature store config file in {}",
fstore.getAbsolutePath());
if (log.isInfoEnabled()) {
log.info("remove feature store config file in {}", fstore.getAbsolutePath());
}
Files.delete(fstore.toPath());
}
if (mstore.exists()) {
log.info("remove model store config file in {}",
mstore.getAbsolutePath());
if (log.isInfoEnabled()) {
log.info("remove model store config file in {}", mstore.getAbsolutePath());
}
Files.delete(mstore.toPath());
}
if (!solrconfig.equals("solrconfig.xml")) {

View File

@ -208,7 +208,7 @@ public class SolrExporter {
solrExporter.start();
log.info("Solr Prometheus Exporter is running");
} catch (IOException e) {
log.error("Failed to start Solr Prometheus Exporter: " + e.toString());
log.error("Failed to start Solr Prometheus Exporter: ", e);
} catch (ArgumentParserException e) {
parser.handleError(e);
}

View File

@ -106,7 +106,7 @@ public abstract class SolrScraper implements Closeable {
queryResponse = client.request(queryRequest, query.getCollection().get());
}
} catch (SolrServerException | IOException e) {
log.error("failed to request: " + queryRequest.getPath() + " " + e.getMessage());
log.error("failed to request: {} {}", queryRequest.getPath(), e.getMessage());
}
JsonNode jsonNode = OBJECT_MAPPER.readTree((String) queryResponse.get("response"));
@ -164,7 +164,7 @@ public abstract class SolrScraper implements Closeable {
name, labelNames, labelValues, value));
}
} catch (JsonQueryException e) {
log.error("Error apply JSON query={} to result", jsonQuery.toString(), e);
log.error("Error apply JSON query={} to result", jsonQuery, e);
scrapeErrorTotal.inc();
}
}

View File

@ -103,11 +103,11 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar
if (templateBaseDir != null && !templateBaseDir.isEmpty()) {
fileResourceLoaderBaseDir = new File(templateBaseDir).getAbsoluteFile();
if (!fileResourceLoaderBaseDir.exists()) { // "*not* exists" condition!
log.warn(TEMPLATE_BASE_DIR + " specified does not exist: " + fileResourceLoaderBaseDir);
log.warn("{} specified does not exist: {}", TEMPLATE_BASE_DIR, fileResourceLoaderBaseDir);
fileResourceLoaderBaseDir = null;
} else {
if (!fileResourceLoaderBaseDir.isDirectory()) { // "*not* a directory" condition
log.warn(TEMPLATE_BASE_DIR + " specified is not a directory: " + fileResourceLoaderBaseDir);
log.warn("{} specified is not a directory: {}", TEMPLATE_BASE_DIR, fileResourceLoaderBaseDir);
fileResourceLoaderBaseDir = null;
}
}
@ -132,7 +132,7 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar
try {
velocityInitProps.load(new InputStreamReader(core.getResourceLoader().openResource(initPropertiesFileName), StandardCharsets.UTF_8));
} catch (IOException e) {
log.warn("Error loading " + PROPERTIES_FILE + " specified property file: " + initPropertiesFileName, e);
log.warn("Error loading {} specified property file: {}", PROPERTIES_FILE, initPropertiesFileName, e);
}
}
}

View File

@ -326,7 +326,8 @@ public class Assign {
public static List<ReplicaPosition> getNodesForNewReplicas(ClusterState clusterState, String collectionName,
String shard, int nrtReplicas, int tlogReplicas, int pullReplicas,
Object createNodeSet, SolrCloudManager cloudManager) throws IOException, InterruptedException, AssignmentException {
log.debug("getNodesForNewReplicas() shard: {} , nrtReplicas : {} , tlogReplicas: {} , pullReplicas: {} , createNodeSet {}", shard, nrtReplicas, tlogReplicas, pullReplicas, createNodeSet);
log.debug("getNodesForNewReplicas() shard: {} , nrtReplicas : {} , tlogReplicas: {} , pullReplicas: {} , createNodeSet {}"
, shard, nrtReplicas, tlogReplicas, pullReplicas, createNodeSet);
DocCollection coll = clusterState.getCollection(collectionName);
Integer maxShardsPerNode = coll.getMaxShardsPerNode() == -1 ? Integer.MAX_VALUE : coll.getMaxShardsPerNode();
List<String> createNodeList = null;

View File

@ -896,9 +896,9 @@ public class CoreContainer {
private void warnUsersOfInsecureSettings() {
if (authenticationPlugin == null || authorizationPlugin == null) {
log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as {}{}"
, "you make it. Consider configuring authentication/authorization before exposing Solr to users internal or "
, "external. See https://s.apache.org/solrsecurity for more info",
log.warn("Not all security plugins configured! authentication={} authorization={}. Solr is only as secure as " +
"you make it. Consider configuring authentication/authorization before exposing Solr to users internal or " +
"external. See https://s.apache.org/solrsecurity for more info",
(authenticationPlugin != null) ? "enabled" : "disabled",
(authorizationPlugin != null) ? "enabled" : "disabled");
}
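The CoreContainer hunk runs in the opposite direction from the rest of the patch: the old code used {} placeholders to splice together constant fragments of the message, which misuses the placeholder contract and pays a runtime formatting cost for text that never varies. Concatenating compile-time String literals is free (javac folds them into a single constant), so placeholders are kept only for the genuinely variable arguments. A sketch of the corrected shape (message abbreviated):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ConstantConcatSketch {
  private static final Logger log = LoggerFactory.getLogger(ConstantConcatSketch.class);

  public static void main(String[] args) {
    boolean authn = false, authz = true;
    // The adjacent literals below are folded into one constant at compile
    // time; only the two enabled/disabled values are formatted at runtime.
    log.warn("Not all security plugins configured! authentication={} authorization={}. " +
        "See https://s.apache.org/solrsecurity for more info",
        authn ? "enabled" : "disabled",
        authz ? "enabled" : "disabled");
  }
}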

View File

@ -233,8 +233,7 @@ public class FileUtil {
}
final boolean ex = f.exists();
if (doLog && ex) {
LOG.warn("Failed to delete file or dir ["
+ f.getAbsolutePath() + "]: it still exists.");
LOG.warn("Failed to delete file or dir [{}]: it still exists.", f.getAbsolutePath());
}
return !ex;
}
@ -747,7 +746,9 @@ public class FileUtil {
new IOUtils.NullOutputStream());
}
} catch (IOException e) {
LOG.debug(e.getMessage());
if (LOG.isDebugEnabled()) {
LOG.debug(e.getMessage());
}
}
});
Future error = executor.submit(() -> {
@ -770,7 +771,9 @@ public class FileUtil {
new IOUtils.NullOutputStream());
}
} catch (IOException e) {
LOG.debug(e.getMessage());
if (LOG.isDebugEnabled()) {
LOG.debug(e.getMessage());
}
}
});
@ -1042,8 +1045,7 @@ public class FileUtil {
public static int symLink(String target, String linkname) throws IOException{
if (target == null || linkname == null) {
LOG.warn("Can not create a symLink with a target = " + target
+ " and link =" + linkname);
LOG.warn("Can not create a symLink with a target = {} and link = {}", target, linkname);
return 1;
}
@ -1080,14 +1082,13 @@ public class FileUtil {
+ "administrators and all non-administrators from creating symbolic links. "
+ "This behavior can be changed in the Local Security Policy management console");
} else if (returnVal != 0) {
LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed "
+ returnVal + " with: " + ec.getMessage());
LOG.warn("Command '{}' failed {} with: {}",StringUtils.join(" ", cmd)
, returnVal, ec.getMessage());
}
return returnVal;
} catch (IOException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Error while create symlink " + linkname + " to " + target
+ "." + " Exception: " + StringUtils.stringifyException(e));
LOG.debug("Error while create symlink {} to {}. Exception: {}", linkname, target, StringUtils.stringifyException(e));
}
throw e;
}
@ -1126,8 +1127,7 @@ public class FileUtil {
shExec.execute();
}catch(IOException e) {
if(LOG.isDebugEnabled()) {
LOG.debug("Error while changing permission : " + filename
+" Exception: " + StringUtils.stringifyException(e));
LOG.debug("Error while changing permission : {} Exception: {}", filename, StringUtils.stringifyException(e));
}
}
return shExec.getExitCode();
@ -1501,7 +1501,7 @@ public class FileUtil {
// then this is acceptable. If it returns false due to some other I/O
// error, then this method will fail later with an IOException while saving
// the jar.
LOG.debug("mkdirs false for " + workingDir + ", execution will continue");
LOG.debug("mkdirs false for {}, execution will continue", workingDir);
}
StringBuilder unexpandedWildcardClasspath = new StringBuilder();

View File

@ -418,7 +418,7 @@ public class BlockPoolSlice {
try {
fileIoProvider.mkdirsWithExistsCheck(volume, targetDir);
} catch(IOException ioe) {
LOG.warn("Failed to mkdirs " + targetDir);
LOG.warn("Failed to mkdirs {}", targetDir);
continue;
}
@ -426,8 +426,7 @@ public class BlockPoolSlice {
try {
fileIoProvider.rename(volume, metaFile, targetMetaFile);
} catch (IOException e) {
LOG.warn("Failed to move meta file from "
+ metaFile + " to " + targetMetaFile, e);
LOG.warn("Failed to move meta file from {} to {}", metaFile, targetMetaFile, e);
continue;
}
@ -435,8 +434,7 @@ public class BlockPoolSlice {
try {
fileIoProvider.rename(volume, blockFile, targetBlockFile);
} catch (IOException e) {
LOG.warn("Failed to move block file from "
+ blockFile + " to " + targetBlockFile, e);
LOG.warn("Failed to move block file from {} to {}", blockFile, targetBlockFile, e);
continue;
}
@ -444,7 +442,7 @@ public class BlockPoolSlice {
++numRecovered;
} else {
// Failure should be rare.
LOG.warn("Failed to move " + blockFile + " to " + targetDir);
LOG.warn("Failed to move {} to {}", blockFile, targetDir);
}
}
}
@ -655,8 +653,7 @@ public class BlockPoolSlice {
replicaToDelete = (replicaToKeep == replica1) ? replica2 : replica1;
if (LOG.isDebugEnabled()) {
LOG.debug("resolveDuplicateReplicas decide to keep " + replicaToKeep
+ ". Will try to delete " + replicaToDelete);
LOG.debug("resolveDuplicateReplicas decide to keep {}. Will try to delete {}", replicaToKeep, replicaToDelete);
}
return replicaToDelete;
}
@ -664,10 +661,10 @@ public class BlockPoolSlice {
private void deleteReplica(final ReplicaInfo replicaToDelete) {
// Delete the files on disk. Failure here is okay.
if (!replicaToDelete.deleteBlockData()) {
LOG.warn("Failed to delete block file for replica " + replicaToDelete);
LOG.warn("Failed to delete block file for replica {}", replicaToDelete);
}
if (!replicaToDelete.deleteMetadata()) {
LOG.warn("Failed to delete meta file for replica " + replicaToDelete);
LOG.warn("Failed to delete meta file for replica {}", replicaToDelete);
}
}
@ -765,18 +762,21 @@ public class BlockPoolSlice {
File replicaFile = new File(currentDir, REPLICA_CACHE_FILE);
// Check whether the file exists or not.
if (!replicaFile.exists()) {
LOG.info("Replica Cache file: "+ replicaFile.getPath() +
" doesn't exist ");
if (LOG.isInfoEnabled()) {
LOG.info("Replica Cache file: {} doesn't exist", replicaFile.getPath());
}
return false;
}
long fileLastModifiedTime = replicaFile.lastModified();
if (System.currentTimeMillis() > fileLastModifiedTime + replicaCacheExpiry) {
LOG.info("Replica Cache file: " + replicaFile.getPath() +
" has gone stale");
if (LOG.isInfoEnabled()) {
LOG.info("Replica Cache file: {} has gone stale", replicaFile.getPath());
}
// Just to make findbugs happy
if (!replicaFile.delete()) {
LOG.info("Replica Cache file: " + replicaFile.getPath() +
" cannot be deleted");
if (LOG.isInfoEnabled()) {
LOG.info("Replica Cache file: {} cannot be deleted", replicaFile.getPath());
}
}
return false;
}
@ -814,14 +814,16 @@ public class BlockPoolSlice {
iter.remove();
volumeMap.add(bpid, info);
}
LOG.info("Successfully read replica from cache file : "
+ replicaFile.getPath());
if (LOG.isInfoEnabled()) {
LOG.info("Successfully read replica from cache file : {}", replicaFile.getPath());
}
return true;
} catch (Exception e) {
// Any exception we need to revert back to read from disk
// Log the error and return false
LOG.info("Exception occurred while reading the replicas cache file: "
+ replicaFile.getPath(), e );
if (LOG.isInfoEnabled()) {
LOG.info("Exception occurred while reading the replicas cache file: {}", replicaFile.getPath(), e);
}
return false;
}
finally {
@ -829,8 +831,9 @@ public class BlockPoolSlice {
IOUtils.closeStream(inputStream);
if (!fileIoProvider.delete(volume, replicaFile)) {
LOG.info("Failed to delete replica cache file: " +
replicaFile.getPath());
if (LOG.isInfoEnabled()) {
LOG.info("Failed to delete replica cache file: {}", replicaFile.getPath());
}
}
}
}
@ -922,8 +925,7 @@ public class BlockPoolSlice {
addToReplicasMap(volumeMap, dir, lazyWriteReplicaMap, isFinalized,
exceptions, subTaskQueue);
} catch (IOException e) {
LOG.warn("Caught exception while adding replicas from " + volume
+ " in subtask. Will throw later.", e);
LOG.warn("Caught exception while adding replicas from {} in subtask. Will throw later.", volume, e);
exceptions.add(e);
}
}

View File

@ -523,7 +523,7 @@ public final class HttpServer2 implements FilterContainer {
if(null != excludeCiphers && !excludeCiphers.isEmpty()) {
sslContextFactory.setExcludeCipherSuites(
StringUtils.getTrimmedStrings(excludeCiphers));
LOG.info("Excluded Cipher List:" + excludeCiphers);
LOG.info("Excluded Cipher List:{}", excludeCiphers);
}
conn.addFirstConnectionFactory(new SslConnectionFactory(sslContextFactory,
@ -610,7 +610,7 @@ public final class HttpServer2 implements FilterContainer {
if (pathSpecs != null) {
for (String path : pathSpecs) {
LOG.info("adding path spec: " + path);
LOG.info("adding path spec: {}", path);
addFilterPathMapping(path, webAppContext);
}
}
@ -782,8 +782,8 @@ public final class HttpServer2 implements FilterContainer {
*/
public void addJerseyResourcePackage(final String packageName,
final String pathSpec) {
LOG.info("addJerseyResourcePackage: packageName=" + packageName
+ ", pathSpec=" + pathSpec);
LOG.info("addJerseyResourcePackage: packageName={}, pathcpec={}"
, packageName, pathSpec);
final ServletHolder sh = new ServletHolder(ServletContainer.class);
sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
"com.sun.jersey.api.core.PackagesResourceConfig");
@ -845,9 +845,10 @@ public final class HttpServer2 implements FilterContainer {
for (int i = 0; i < servletMappings.length; i++) {
if (servletMappings[i].containsPathSpec(pathSpec)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Found existing " + servletMappings[i].getServletName() +
" servlet at path " + pathSpec + "; will replace mapping" +
" with " + holder.getName() + " servlet");
LOG.debug("Found existing {} servlet at path {}; will replace mapping with {} servlet"
, servletMappings[i].getServletName()
, pathSpec
, holder.getName());
}
ServletMapping[] newServletMappings =
ArrayUtil.removeFromArray(servletMappings, servletMappings[i]);
@ -859,7 +860,7 @@ public final class HttpServer2 implements FilterContainer {
webAppContext.addServlet(holder, pathSpec);
if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
LOG.info("Adding Kerberos (SPNEGO) filter to {}", name);
ServletHandler handler = webAppContext.getServletHandler();
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
@ -894,9 +895,8 @@ public final class HttpServer2 implements FilterContainer {
for (int i = 0; i < servletMappings.length; i++) {
if (servletMappings[i].containsPathSpec(pathSpec)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Found existing " + servletMappings[i].getServletName() +
" servlet at path " + pathSpec + "; will replace mapping" +
" with " + sh.getName() + " servlet");
LOG.debug("Found existing {} servlet at path {}; will replace mapping with {} servlet"
, servletMappings[i].getServletName(), pathSpec, sh.getName());
}
ServletMapping[] newServletMappings =
ArrayUtil.removeFromArray(servletMappings, servletMappings[i]);
@ -936,9 +936,10 @@ public final class HttpServer2 implements FilterContainer {
final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
FilterMapping fmap = getFilterMapping(name, USER_FACING_URLS);
defineFilter(webAppContext, filterHolder, fmap);
LOG.info(
"Added filter " + name + " (class=" + classname + ") to context "
+ webAppContext.getDisplayName());
if (LOG.isInfoEnabled()) {
LOG.info("Added filter {} (class={}) to context {}", name, classname
, webAppContext.getDisplayName());
}
final String[] ALL_URLS = { "/*" };
fmap = getFilterMapping(name, ALL_URLS);
for (Map.Entry<ServletContextHandler, Boolean> e
@ -946,8 +947,10 @@ public final class HttpServer2 implements FilterContainer {
if (e.getValue()) {
ServletContextHandler ctx = e.getKey();
defineFilter(ctx, filterHolder, fmap);
LOG.info("Added filter " + name + " (class=" + classname
+ ") to context " + ctx.getDisplayName());
if (LOG.isInfoEnabled()) {
LOG.info("Added filter {} (class={}) to context {}"
, name, classname, ctx.getDisplayName());
}
}
}
filterNames.add(name);
@ -963,7 +966,7 @@ public final class HttpServer2 implements FilterContainer {
for (ServletContextHandler ctx : defaultContexts.keySet()) {
defineFilter(ctx, filterHolder, fmap);
}
LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
LOG.info("Added global filter {}' (class={})'", name, classname);
}
/**
@ -1179,7 +1182,9 @@ public final class HttpServer2 implements FilterContainer {
// failed to open w/o issuing a close first, even if the port is changed
listener.close();
listener.open();
LOG.info("Jetty bound to port " + listener.getLocalPort());
if (LOG.isInfoEnabled()) {
LOG.info("Jetty bound to port {}", listener.getLocalPort());
}
}
/**
@ -1286,9 +1291,7 @@ public final class HttpServer2 implements FilterContainer {
try {
c.close();
} catch (Exception e) {
LOG.error(
"Error while stopping listener for webapp"
+ webAppContext.getDisplayName(), e);
LOG.error("Error while stopping listener for webapp{}", webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
}
@ -1300,16 +1303,15 @@ public final class HttpServer2 implements FilterContainer {
webAppContext.clearAttributes();
webAppContext.stop();
} catch (Exception e) {
LOG.error("Error while stopping web app context for webapp "
+ webAppContext.getDisplayName(), e);
LOG.error("Error while stopping web app context for webapp {}", webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
try {
webServer.stop();
} catch (Exception e) {
LOG.error("Error while stopping web server for webapp "
+ webAppContext.getDisplayName(), e);
LOG.error("Error while stopping web server for webapp {}"
, webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
@ -1415,8 +1417,7 @@ public final class HttpServer2 implements FilterContainer {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Unauthenticated users are not " +
"authorized to access this page.");
LOG.warn("User " + remoteUser + " is unauthorized to access the page "
+ request.getRequestURI() + ".");
LOG.warn("User {} is unauthorized to access the page {}.", remoteUser, request.getRequestURI());
return false;
}

View File

@ -918,12 +918,8 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
Object realResponse = Utils.fromJSONString(strResponse);
String err = JSONTestUtil.matchObj("/grouped/" + groupField, realResponse, modelResponse);
if (err != null) {
log.error("GROUPING MISMATCH (" + queryIter + "): " + err
+ "\n\trequest="+req
+ "\n\tresult="+strResponse
+ "\n\texpected="+ Utils.toJSONString(modelResponse)
+ "\n\tsorted_model="+ sortedGroups
);
log.error("GROUPING MISMATCH ({}}): {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tsorted_model={}"
, queryIter, err, req, strResponse, Utils.toJSONString(modelResponse), sortedGroups);
// re-execute the request... good for putting a breakpoint here for debugging
String rsp = h.query(req);
@ -934,12 +930,8 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
// assert post / pre grouping facets
err = JSONTestUtil.matchObj("/facet_counts/facet_fields/"+FOO_STRING_FIELD, realResponse, expectedFacetResponse);
if (err != null) {
log.error("GROUPING MISMATCH (" + queryIter + "): " + err
+ "\n\trequest="+req
+ "\n\tresult="+strResponse
+ "\n\texpected="+ Utils.toJSONString(expectedFacetResponse)
);
log.error("GROUPING MISMATCH ({}): {}\n\trequest={}\n\tresult={}\n\texpected={}"
, queryIter, err, req, strResponse, Utils.toJSONString(expectedFacetResponse));
// re-execute the request... good for putting a breakpoint here for debugging
h.query(req);
fail(err);

View File

@ -263,11 +263,8 @@ public class TestJoin extends SolrTestCaseJ4 {
Object realResponse = Utils.fromJSONString(strResponse);
String err = JSONTestUtil.matchObj("/response", realResponse, resultSet);
if (err != null) {
log.error("JOIN MISMATCH: " + err
+ "\n\trequest="+req
+ "\n\tresult="+strResponse
+ "\n\texpected="+ Utils.toJSONString(resultSet)
+ "\n\tmodel="+ model
log.error("JOIN MISMATCH: {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tmodel={}"
, err, req, strResponse, Utils.toJSONString(resultSet), model
);
// re-execute the request... good for putting a breakpoint here for debugging

View File

@ -286,11 +286,8 @@ public class TestRandomDVFaceting extends SolrTestCaseJ4 {
for (int i=1; i<responses.size(); i++) {
String err = JSONTestUtil.match("/", responses.get(i), responses.get(0), 0.0);
if (err != null) {
log.error("ERROR: mismatch facet response: " + err +
"\n expected =" + responses.get(0) +
"\n response = " + responses.get(i) +
"\n request = " + params
);
log.error("ERROR: mismatch facet response: {}\n expected ={}\n response = {}\n request = {}"
, err, responses.get(0), responses.get(i), params);
fail(err);
}
}

View File

@ -316,11 +316,8 @@ public class TestRandomFaceting extends SolrTestCaseJ4 {
String err = JSONTestUtil.match("/", actual, expected, 0.0);
if (err != null) {
log.error("ERROR: mismatch facet response: " + err +
"\n expected =" + expected +
"\n response = " + actual +
"\n request = " + params
);
log.error("ERROR: mismatch facet response: {}\n expected ={}\n response = {}\n request = {}"
, err, expected, actual, params);
fail(err);
}
}

View File

@ -66,7 +66,9 @@ public class AssignBackwardCompatibilityTest extends SolrCloudTestCase {
boolean clearedCounter = false;
for (int i = 0; i < numOperations; i++) {
log.info("Collection counter={} i={}", getCounter(), i);
if (log.isInfoEnabled()) {
log.info("Collection counter={} i={}", getCounter(), i);
}
boolean deleteReplica = random().nextBoolean() && numLiveReplicas > 1;
// No need to clear counter more than one time
if (random().nextBoolean() && i > 5 && !clearedCounter) {

View File

@ -291,8 +291,10 @@ public class ChaosMonkeyNothingIsSafeWithPullReplicasTest extends AbstractFullDi
.getResults().getNumFound();
assertTrue("Found " + ctrlDocs + " control docs", cloudClientDocs > 0);
log.info("collection state: " + printClusterStateInfo(DEFAULT_COLLECTION));
if (log.isInfoEnabled()) {
log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION));
}
if (VERBOSE) System.out.println("control docs:"
+ controlClient.query(new SolrQuery("*:*")).getResults()

View File

@ -203,10 +203,11 @@ public class ChaosMonkeySafeLeaderWithPullReplicasTest extends AbstractFullDistr
Thread.sleep(3000);
waitForThingsToLevelOut(3, TimeUnit.MINUTES);
log.info("control docs:" + controlClient.query(new SolrQuery("*:*")).getResults().getNumFound() + "\n\n");
log.info("collection state: " + printClusterStateInfo(DEFAULT_COLLECTION));
if (log.isInfoEnabled()) {
log.info("control docs:{}\n\n", controlClient.query(new SolrQuery("*:*")).getResults().getNumFound());
log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // logOk
}
waitForReplicationFromReplicas(DEFAULT_COLLECTION, cloudClient.getZkStateReader(), new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME));
// waitForAllWarmingSearchers();

View File

@ -205,7 +205,9 @@ public class CollectionPropsTest extends SolrCloudTestCase {
// Trigger a value change event
log.info("setting value2");
collectionProps.setCollectionProperty(collectionName, "property", "value2");
log.info("(value2) waitForTrigger=={}", watcher.waitForTrigger());
if (log.isInfoEnabled()) {
log.info("(value2) waitForTrigger=={}", watcher.waitForTrigger());
}
assertEquals("value2", watcher.getProps().get("property"));
// Delete the properties znode

View File

@ -81,7 +81,9 @@ public class DeleteInactiveReplicaTest extends SolrCloudTestCase {
return r == null || r.getState() != Replica.State.ACTIVE;
});
log.info("Removing replica {}/{} ", shard.getName(), replica.getName());
if (log.isInfoEnabled()) {
log.info("Removing replica {}/{} ", shard.getName(), replica.getName());
}
CollectionAdminRequest.deleteReplica(collectionName, shard.getName(), replica.getName())
.process(cluster.getSolrClient());
waitForState("Expected deleted replica " + replica.getName() + " to be removed from cluster state", collectionName, (n, c) -> {

View File

@ -75,7 +75,7 @@ public class DeleteNodeTest extends SolrCloudTestCase {
// check what replicas are on the node, and whether the call should fail
boolean shouldFail = false;
DocCollection docColl = state.getCollection(coll);
log.info("#### DocCollection: " + docColl);
log.info("#### DocCollection: {}", docColl);
List<Replica> replicas = docColl.getReplicas(node2bdecommissioned);
if (replicas != null) {
for (Replica replica : replicas) {
@ -106,7 +106,9 @@ public class DeleteNodeTest extends SolrCloudTestCase {
}
Thread.sleep(50);
}
log.info("####### DocCollection after: " + cloudClient.getZkStateReader().getClusterState().getCollection(coll));
if (log.isInfoEnabled()) {
log.info("####### DocCollection after: {}", cloudClient.getZkStateReader().getClusterState().getCollection(coll));
}
if (shouldFail) {
assertTrue(String.valueOf(rsp), rsp.getRequestStatus() == RequestStatusState.FAILED);
} else {

View File

@ -467,7 +467,9 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
try {
cluster.getSolrClient().waitForState(collectionName, 20, TimeUnit.SECONDS, (liveNodes, collectionState) -> collectionState.getReplicas().size() == 1);
} catch (TimeoutException e) {
log.info("Timeout wait for state {}", getCollectionState(collectionName));
if (log.isInfoEnabled()) {
log.info("Timeout wait for state {}", getCollectionState(collectionName));
}
throw e;
}
}

View File

@ -123,7 +123,7 @@ public class DistribJoinFromCollectionTest extends SolrCloudTestCase{
@AfterClass
public static void shutdown() {
log.info("DistribJoinFromCollectionTest logic complete ... deleting the " + toColl + " and " + fromColl + " collections");
log.info("DistribJoinFromCollectionTest logic complete ... deleting the {} and {} collections", toColl, fromColl);
// try to clean up
for (String c : new String[]{ toColl, fromColl }) {
@ -132,7 +132,7 @@ public class DistribJoinFromCollectionTest extends SolrCloudTestCase{
req.process(cluster.getSolrClient());
} catch (Exception e) {
// don't fail the test
log.warn("Could not delete collection {} after test completed due to: " + e, c);
log.warn("Could not delete collection {} after test completed due to:", c, e);
}
}

View File

@ -123,7 +123,7 @@ public class DistributedVersionInfoTest extends SolrCloudTestCase {
req.setParams(params);
req.add(doc);
log.info("Sending doc with out-of-date version ("+(maxOnReplica -1)+") document directly to replica");
log.info("Sending doc with out-of-date version ({}) document directly to replica", maxOnReplica -1);
client.request(req);
client.commit();
@ -244,7 +244,9 @@ public class DistributedVersionInfoTest extends SolrCloudTestCase {
cluster.getSolrClient().commit(COLLECTION);
log.info("Total of "+deletedDocs.size()+" docs deleted");
if (log.isInfoEnabled()) {
log.info("Total of {} docs deleted", deletedDocs.size());
}
maxOnLeader = getMaxVersionFromIndex(leader);
maxOnReplica = getMaxVersionFromIndex(replica);
@ -363,7 +365,7 @@ public class DistributedVersionInfoTest extends SolrCloudTestCase {
Thread.sleep(1000);
// send reload command for the collection
log.info("Sending RELOAD command for " + testCollectionName);
log.info("Sending RELOAD command for {}", testCollectionName);
CollectionAdminRequest.reloadCollection(testCollectionName)
.process(client);
Thread.sleep(2000); // reload can take a short while

View File

@ -89,7 +89,9 @@ public class ForceLeaderTest extends HttpPartitionTest {
JettySolrRunner notLeader0 = getJettyOnPort(getReplicaPort(notLeaders.get(0)));
ZkController zkController = notLeader0.getCoreContainer().getZkController();
log.info("Before put non leaders into lower term: " + printClusterStateInfo());
if (log.isInfoEnabled()) {
log.info("Before put non leaders into lower term: {}", printClusterStateInfo());
}
putNonLeadersIntoLowerTerm(testCollectionName, SHARD1, zkController, leader, notLeaders, cloudClient);
for (Replica replica : notLeaders) {
@ -109,7 +111,9 @@ public class ForceLeaderTest extends HttpPartitionTest {
}
}
assertEquals(2, numReplicasOnLiveNodes);
log.info("Before forcing leader: " + printClusterStateInfo());
if (log.isInfoEnabled()) {
log.info("Before forcing leader: {}", printClusterStateInfo());
}
// Assert there is no leader yet
assertNull("Expected no leader right now. State: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1),
clusterState.getCollection(testCollectionName).getSlice(SHARD1).getLeader());
@ -124,7 +128,9 @@ public class ForceLeaderTest extends HttpPartitionTest {
cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName);
clusterState = cloudClient.getZkStateReader().getClusterState();
log.info("After forcing leader: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1));
if (log.isInfoEnabled()) {
log.info("After forcing leader: {}", clusterState.getCollection(testCollectionName).getSlice(SHARD1));
}
// we have a leader
Replica newLeader = clusterState.getCollectionOrNull(testCollectionName).getSlice(SHARD1).getLeader();
assertNotNull(newLeader);
@ -195,7 +201,9 @@ public class ForceLeaderTest extends HttpPartitionTest {
}
// Kill the leader
log.info("Killing leader for shard1 of " + collectionName + " on node " + leader.getNodeName() + "");
if (log.isInfoEnabled()) {
log.info("Killing leader for shard1 of {} on node {}", collectionName, leader.getNodeName());
}
leaderJetty.stop();
// Wait for a steady state, till the shard is leaderless
@ -243,14 +251,16 @@ public class ForceLeaderTest extends HttpPartitionTest {
waitForRecoveriesToFinish(collection, cloudClient.getZkStateReader(), true);
cloudClient.getZkStateReader().forceUpdateCollection(collection);
ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
log.info("After bringing back leader: " + clusterState.getCollection(collection).getSlice(SHARD1));
if (log.isInfoEnabled()) {
log.info("After bringing back leader: {}", clusterState.getCollection(collection).getSlice(SHARD1));
}
int numActiveReplicas = getNumberOfActiveReplicas(clusterState, collection, SHARD1);
assertEquals(1+notLeaders.size(), numActiveReplicas);
log.info("Sending doc "+docid+"...");
log.info("Sending doc {}...", docid);
sendDoc(docid);
log.info("Committing...");
cloudClient.commit();
log.info("Doc "+docid+" sent and commit issued");
log.info("Doc {} sent and commit issued", docid);
assertDocsExistInAllReplicas(notLeaders, collection, docid, docid);
assertDocsExistInAllReplicas(Collections.singletonList(leader), collection, docid, docid);
}

View File

@ -480,7 +480,9 @@ public class FullSolrCloudDistribCmdsTest extends SolrCloudTestCase {
for (Replica replica : slice) {
try (HttpSolrClient replicaClient = getHttpSolrClient(replica.getCoreUrl())) {
final SolrDocumentList replicaResults = replicaClient.query(perReplicaParams).getResults();
log.debug("Shard {}: Replica ({}) results: {}", shardName, replica.getCoreName(), replicaResults);
if (log.isDebugEnabled()) {
log.debug("Shard {}: Replica ({}) results: {}", shardName, replica.getCoreName(), replicaResults);
}
assertEquals("inconsistency w/leader: shard=" + shardName + "core=" + replica.getCoreName(),
Collections.emptySet(),
CloudInspectUtil.showDiff(leaderResults, replicaResults,

View File

@ -107,7 +107,7 @@ class FullThrottleStoppableIndexingThread extends StoppableIndexingThread {
}
log.info("FT added docs:" + numAdds + " with " + fails + " fails" + " deletes:" + numDeletes);
log.info("FT added docs:{} with {} fails deletes:{}", numAdds, fails, numDeletes);
}
private void changeUrlOnError(Exception e) {

View File

@ -83,11 +83,15 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest {
+ printClusterStateInfo(),
notLeaders.size() == 1);
log.info("All replicas active for "+testCollectionName);
if (log.isInfoEnabled()) {
log.info("All replicas active for {}", testCollectionName);
}
// let's put the leader in its own partition, no replicas can contact it now
Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1");
log.info("Creating partition to leader at "+leader.getCoreUrl());
if (log.isInfoEnabled()) {
log.info("Creating partition to leader at {}", leader.getCoreUrl());
}
SocketProxy leaderProxy = getProxyForReplica(leader);
leaderProxy.close();
@ -101,7 +105,9 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest {
leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1");
assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState());
log.info("Healing partitioned replica at "+leader.getCoreUrl());
if (log.isInfoEnabled()) {
log.info("Healing partitioned replica at {}", leader.getCoreUrl());
}
leaderProxy.reopen();
Thread.sleep(sleepMsBeforeHealPartition);
@ -126,11 +132,13 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest {
+ printClusterStateInfo(),
notLeaders.size() == 2);
log.info("All replicas active for "+testCollectionName);
log.info("All replicas active for {}", testCollectionName);
// let's put the leader in its own partition, no replicas can contact it now
Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1");
log.info("Creating partition to leader at "+leader.getCoreUrl());
if (log.isInfoEnabled()) {
log.info("Creating partition to leader at {}", leader.getCoreUrl());
}
SocketProxy leaderProxy = getProxyForReplica(leader);
leaderProxy.close();
@ -143,7 +151,9 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest {
leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1");
assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState());
log.info("Healing partitioned replica at "+leader.getCoreUrl());
if (log.isInfoEnabled()) {
log.info("Healing partitioned replica at {}", leader.getCoreUrl());
}
leaderProxy.reopen();
Thread.sleep(sleepMsBeforeHealPartition);
@ -165,21 +175,22 @@ public class HttpPartitionOnCommitTest extends BasicDistributedZkTest {
protected void sendCommitWithRetry(Replica replica) throws Exception {
String replicaCoreUrl = replica.getCoreUrl();
log.info("Sending commit request to: "+replicaCoreUrl);
log.info("Sending commit request to: {}", replicaCoreUrl);
final RTimer timer = new RTimer();
try (HttpSolrClient client = getHttpSolrClient(replicaCoreUrl)) {
try {
client.commit();
log.info("Sent commit request to {} OK, took {}ms", replicaCoreUrl, timer.getTime());
if (log.isInfoEnabled()) {
log.info("Sent commit request to {} OK, took {}ms", replicaCoreUrl, timer.getTime());
}
} catch (Exception exc) {
Throwable rootCause = SolrException.getRootCause(exc);
if (rootCause instanceof NoHttpResponseException) {
log.warn("No HTTP response from sending commit request to "+replicaCoreUrl+
"; will re-try after waiting 3 seconds");
log.warn("No HTTP response from sending commit request to {}; will re-try after waiting 3 seconds", replicaCoreUrl);
Thread.sleep(3000);
client.commit();
log.info("Second attempt at sending commit to "+replicaCoreUrl+" succeeded.");
log.info("Second attempt at sending commit to {} succeeded", replicaCoreUrl);
} else {
throw exc;
}

View File

@ -260,7 +260,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase {
maxVersionBefore = ulog.getCurrentMaxVersion();
}
assertNotNull("max version bucket seed not set for core " + coreName, maxVersionBefore);
log.info("Looked up max version bucket seed "+maxVersionBefore+" for core "+coreName);
log.info("Looked up max version bucket seed {} for core {}", maxVersionBefore, coreName);
// now up the stakes and do more docs
int numDocs = TEST_NIGHTLY ? 1000 : 105;
@ -296,15 +296,15 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase {
try (SolrCore core = coreContainer.getCore(coreName)) {
assertNotNull("Core '" + coreName + "' not found for replica: " + notLeader.getName(), core);
Long currentMaxVersion = core.getUpdateHandler().getUpdateLog().getCurrentMaxVersion();
log.info("After recovery, looked up NEW max version bucket seed " + currentMaxVersion +
" for core " + coreName + ", was: " + maxVersionBefore);
log.info("After recovery, looked up NEW max version bucket seed {} for core {}, was: {}"
, currentMaxVersion, coreName, maxVersionBefore);
assertTrue("max version bucket seed not updated after recovery!", currentMaxVersion > maxVersionBefore);
}
// verify all docs received
assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, numDocs + 3);
log.info("testRf2 succeeded ... deleting the "+testCollectionName+" collection");
log.info("testRf2 succeeded ... deleting the {} collection", testCollectionName);
// try to clean up
attemptCollectionDelete(cloudClient, testCollectionName);
@ -374,7 +374,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase {
assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 4);
log.info("testRf3 succeeded ... deleting the "+testCollectionName+" collection");
log.info("testRf3 succeeded ... deleting the {} collection", testCollectionName);
// try to clean up
attemptCollectionDelete(cloudClient, testCollectionName);
@ -433,7 +433,9 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase {
// TODO: This test logic seems to be timing dependent and fails on Jenkins
// need to come up with a better approach
log.info("Sending doc 2 to old leader "+leader.getName());
if (log.isInfoEnabled()) {
log.info("Sending doc 2 to old leader {}", leader.getName());
}
try ( HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName)) {
leaderSolr.add(doc);
@ -458,7 +460,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase {
waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 30);
assertDocsExistInAllReplicas(participatingReplicas, testCollectionName, 1, 2);
log.info("testLeaderZkSessionLoss succeeded ... deleting the "+testCollectionName+" collection");
log.info("testLeaderZkSessionLoss succeeded ... deleting the {} collection", testCollectionName);
// try to clean up
attemptCollectionDelete(cloudClient, testCollectionName);
@ -609,7 +611,9 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase {
final Replica.State state = replica.getState();
if (state != Replica.State.ACTIVE) {
log.info("Replica " + replica.getName() + " is currently " + state);
if (log.isInfoEnabled()) {
log.info("Replica {} is currently {}", replica.getName(), state);
}
allReplicasUp = false;
}
}
@ -626,7 +630,9 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase {
fail("Didn't see replicas "+ replicasToCheck +
" come up within " + maxWaitMs + " ms! ClusterState: " + printClusterStateInfo(testCollectionName));
log.info("Took {} ms to see replicas [{}] become active.", timer.getTime(), replicasToCheck);
if (log.isInfoEnabled()) {
log.info("Took {} ms to see replicas [{}] become active.", timer.getTime(), replicasToCheck);
}
}
}

View File

@ -78,7 +78,7 @@ public class KerberosTestServices {
FileUtils.deleteDirectory(workDir); // clean directory
numTries++;
if (numTries == 3) {
log.error("Failed setting up MiniKDC. Tried " + numTries + " times.");
log.error("Failed setting up MiniKDC. Tried {} times.", numTries);
throw e;
}
log.error("BindException encountered when setting up MiniKdc. Trying again.");

View File

@ -99,7 +99,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 {
throws KeeperException, InterruptedException, IOException {
super.runLeaderProcess(weAreReplacement, pauseBeforeStartMs);
if (runLeaderDelay > 0) {
log.info("Sleeping for " + runLeaderDelay + "ms to simulate leadership takeover delay");
log.info("Sleeping for {}ms to simulate leadership takeover delay", runLeaderDelay);
Thread.sleep(runLeaderDelay);
}
}
@ -352,7 +352,7 @@ public class LeaderElectionTest extends SolrTestCaseJ4 {
@Test
public void testParallelElection() throws Exception {
final int numShards = 2 + random().nextInt(18);
log.info("Testing parallel election across " + numShards + " shards");
log.info("Testing parallel election across {} shards", numShards);
List<ClientThread> threads = new ArrayList<>();

View File

@ -142,7 +142,7 @@ public class LeaderFailoverAfterPartitionTest extends HttpPartitionTest {
if (oldLeaderProxy != null) {
oldLeaderProxy.close();
} else {
log.warn("No SocketProxy found for old leader node "+leaderNode);
log.warn("No SocketProxy found for old leader node {}",leaderNode);
}
Thread.sleep(10000); // give chance for new leader to be elected.

View File

@ -102,7 +102,9 @@ public class LeaderTragicEventTest extends SolrCloudTestCase {
for (String id : addedIds) {
assertNotNull(cluster.getSolrClient().getById(collection,id));
}
log.info("The test success oldLeader:{} currentState:{}", oldLeader, getCollectionState(collection));
if (log.isInfoEnabled()) {
log.info("The test success oldLeader:{} currentState:{}", oldLeader, getCollectionState(collection));
}
} finally {
CollectionAdminRequest.deleteCollection(collection).process(cluster.getSolrClient());
@ -178,7 +180,9 @@ public class LeaderTragicEventTest extends SolrCloudTestCase {
if (numReplicas == 2) {
Slice shard = getCollectionState(collection).getSlice("shard1");
otherReplicaJetty = cluster.getReplicaJetty(getNonLeader(shard));
log.info("Stop jetty node : {} state:{}", otherReplicaJetty.getBaseUrl(), getCollectionState(collection));
if (log.isInfoEnabled()) {
log.info("Stop jetty node : {} state:{}", otherReplicaJetty.getBaseUrl(), getCollectionState(collection));
}
otherReplicaJetty.stop();
cluster.waitForJettyToStop(otherReplicaJetty);
waitForState("Timeout waiting for replica get down", collection, (liveNodes, collectionState) -> getNonLeader(collectionState.getSlice("shard1")).getState() != Replica.State.ACTIVE);

View File

@ -92,7 +92,9 @@ public class LeaderVoteWaitTimeoutTest extends SolrCloudTestCase {
cluster.stopJettySolrRunner(jetty);// TODO: Can we avoid this restart
cluster.startJettySolrRunner(jetty);
proxy.open(jetty.getBaseUrl().toURI());
log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
if (log.isInfoEnabled()) {
log.info("Adding proxy for URL: {}. Proxy {}", jetty.getBaseUrl(), proxy.getUrl());
}
proxies.put(jetty, proxy);
jettys.put(proxy.getUrl(), jetty);
}

View File

@ -153,14 +153,14 @@ public class MigrateRouteKeyTest extends SolrCloudTestCase {
cluster.getSolrClient().deleteById("a/" + BIT_SEP + "!104");
splitKeyCount[0]--;
} catch (Exception e) {
log.warn("Error deleting document a/" + BIT_SEP + "!104", e);
log.warn("Error deleting document a/{}!104", BIT_SEP, e);
}
cluster.getSolrClient().commit();
collectionClient.commit();
solrQuery = new SolrQuery("*:*").setRows(1000);
QueryResponse response = collectionClient.query(solrQuery);
log.info("Response from target collection: " + response);
log.info("Response from target collection: {}", response);
assertEquals("DocCount on target collection does not match", splitKeyCount[0], response.getResults().getNumFound());
waitForState("Expected to find routing rule for split key " + splitKey, "sourceCollection", (n, c) -> {
@ -208,7 +208,7 @@ public class MigrateRouteKeyTest extends SolrCloudTestCase {
if (splitKey.equals(shardKey))
splitKeyCount++;
} catch (Exception e) {
log.error("Exception while adding document id: " + doc.getField("id"), e);
log.error("Exception while adding document id: {}", doc.getField("id"), e);
}
try {
Thread.sleep(50);

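The warn and error calls in this file lean on an SLF4J convention: when the final argument is a Throwable that has no placeholder of its own, it is logged as the exception (with stack trace) rather than formatted into the message. A short sketch, with someId as an illustrative stand-in for the document id:

    try {
      cluster.getSolrClient().deleteById(someId);
    } catch (Exception e) {
      // One placeholder, two trailing arguments: someId fills {}, and e, left
      // without a placeholder, is treated as the exception to print.
      log.warn("Error deleting document {}", someId, e);
    }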

@ -95,7 +95,9 @@ public class MoveReplicaTest extends SolrCloudTestCase {
// commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018
public void test() throws Exception {
String coll = getTestClass().getSimpleName() + "_coll_" + inPlaceMove;
log.info("total_jettys: " + cluster.getJettySolrRunners().size());
if (log.isInfoEnabled()) {
log.info("total_jettys: {}", cluster.getJettySolrRunners().size());
}
int REPLICATION = 2;
CloudSolrClient cloudClient = cluster.getSolrClient();
@ -154,7 +156,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
boolean recovered = false;
for (int i = 0; i < 300; i++) {
DocCollection collState = getCollectionState(coll);
log.debug("###### " + collState);
log.debug("###### {}", collState);
Collection<Replica> replicas = collState.getSlice(shardId).getReplicas();
boolean allActive = true;
boolean hasLeaders = true;
@ -164,7 +166,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
continue;
}
if (!r.isActive(Collections.singleton(targetNode))) {
log.info("Not active: " + r);
log.info("Not active: {}", r);
allActive = false;
}
}
@ -182,7 +184,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
recovered = true;
break;
} else {
log.info("--- waiting, allActive=" + allActive + ", hasLeaders=" + hasLeaders);
log.info("--- waiting, allActive={}, hasLeaders={}", allActive, hasLeaders);
Thread.sleep(1000);
}
}
@ -198,7 +200,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
recovered = false;
for (int i = 0; i < 300; i++) {
DocCollection collState = getCollectionState(coll);
log.debug("###### " + collState);
log.debug("###### {}", collState);
Collection<Replica> replicas = collState.getSlice(shardId).getReplicas();
boolean allActive = true;
boolean hasLeaders = true;
@ -208,7 +210,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
continue;
}
if (!r.isActive(Collections.singleton(replica.getNodeName()))) {
log.info("Not active yet: " + r);
log.info("Not active yet: {}", r);
allActive = false;
}
}
@ -301,7 +303,9 @@ public class MoveReplicaTest extends SolrCloudTestCase {
}
assertFalse(success);
log.info("--- current collection state: " + cloudClient.getZkStateReader().getClusterState().getCollection(coll));
if (log.isInfoEnabled()) {
log.info("--- current collection state: {}", cloudClient.getZkStateReader().getClusterState().getCollection(coll));
}
assertEquals(100, cluster.getSolrClient().query(coll, new SolrQuery("*:*")).getResults().getNumFound());
}


@ -58,11 +58,13 @@ public class OutOfBoxZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 {
@Override
public void setUp() throws Exception {
super.setUp();
log.info("####SETUP_START " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_START {}", getTestName());
}
createTempDir();
zkDir = createTempDir().resolve("zookeeper/server1/data");
log.info("ZooKeeper dataDir:" + zkDir);
log.info("ZooKeeper dataDir:{}", zkDir);
zkServer = new ZkTestServer(zkDir);
zkServer.run();
@ -80,7 +82,9 @@ public class OutOfBoxZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 {
zkClient.create(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false);
zkClient.close();
log.info("####SETUP_END " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_END {}", getTestName());
}
}
@Override
@ -123,7 +127,7 @@ public class OutOfBoxZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 {
protected void assertOpenACLUnsafeAllover(SolrZkClient zkClient, String path, List<String> verifiedList) throws Exception {
List<ACL> acls = zkClient.getSolrZooKeeper().getACL(path, new Stat());
if (log.isInfoEnabled()) {
log.info("Verifying " + path);
log.info("Verifying {}", path);
}
if (ZooDefs.CONFIG_NODE.equals(path)) {
// Treat this node specially, from the ZK docs:


@ -63,11 +63,13 @@ public class OverriddenZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 {
@Override
public void setUp() throws Exception {
super.setUp();
log.info("####SETUP_START " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_START {}", getTestName());
}
createTempDir();
zkDir = createTempDir().resolve("zookeeper/server1/data");
log.info("ZooKeeper dataDir:" + zkDir);
log.info("ZooKeeper dataDir:{}", zkDir);
zkServer = new ZkTestServer(zkDir);
zkServer.run(false);
@ -92,7 +94,9 @@ public class OverriddenZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 {
zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false);
zkClient.close();
log.info("####SETUP_END " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_END {}", getTestName());
}
}
@Override


@ -745,7 +745,9 @@ public class OverseerCollectionConfigSetProcessorTest extends SolrTestCaseJ4 {
overseerMock, completedMapMock, failureMapMock);
log.info("clusterstate " + clusterStateMock.hashCode());
if (log.isInfoEnabled()) {
log.info("clusterstate {}", clusterStateMock.hashCode());
}
startComponentUnderTest();


@ -76,7 +76,7 @@ public class OverseerRolesTest extends SolrCloudTestCase {
}
private void waitForNewOverseer(int seconds, String expected, boolean failOnIntermediateTransition) throws Exception {
log.info("Expecting node: "+expected);
log.info("Expecting node: {}", expected);
waitForNewOverseer(seconds, s -> Objects.equals(s, expected), failOnIntermediateTransition);
}
@ -97,8 +97,10 @@ public class OverseerRolesTest extends SolrCloudTestCase {
}
private void logOverseerState() throws KeeperException, InterruptedException {
log.info("Overseer: {}", getLeaderNode(zkClient()));
log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election"));
if (log.isInfoEnabled()) {
log.info("Overseer: {}", getLeaderNode(zkClient()));
log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // logOk
}
}
@Test
@ -195,7 +197,7 @@ public class OverseerRolesTest extends SolrCloudTestCase {
logOverseerState();
// kill the current overseer, and check that the next node in the election queue assumes leadership
leaderJetty.stop();
log.info("Killing designated overseer: "+overseer1);
log.info("Killing designated overseer: {}", overseer1);
// after 5 seconds, bring back dead designated overseer and assert that it assumes leadership "right away",
// i.e. without any other node assuming leadership before this node becomes leader.

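The logOk comment in logOverseerState() above is a marker for the validateLogCalls precommit task: a call the checker would otherwise flag can be annotated to record that it has been reviewed, presumably causing the validator to skip that line. Here the second expensive call already sits inside a guarded block, so it is annotated instead of being wrapped in a redundant second check:

    if (log.isInfoEnabled()) {
      log.info("Overseer: {}", getLeaderNode(zkClient()));
      // Already inside an INFO guard; marked for the checker rather than re-guarded.
      log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // logOk
    }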

@ -558,7 +558,8 @@ public class OverseerTest extends SolrTestCaseJ4 {
}
}
log.warn("Timeout waiting for collections: " + Arrays.asList(collections) + " state:" + stateReader.getClusterState());
log.warn("Timeout waiting for collections: {} state: {}"
, Arrays.asList(collections), stateReader.getClusterState());
}
@Test
@ -1163,7 +1164,9 @@ public class OverseerTest extends SolrTestCaseJ4 {
if (Arrays.binarySearch(interestingOps, op) < 0)
continue;
Stats.Stat stat = entry.getValue();
log.info("op: {}, success: {}, failure: {}", op, stat.success.get(), stat.errors.get());
if (log.isInfoEnabled()) {
log.info("op: {}, success: {}, failure: {}", op, stat.success.get(), stat.errors.get());
}
Timer timer = stat.requestTime;
printTimingStats(timer);
}
@ -1177,15 +1180,17 @@ public class OverseerTest extends SolrTestCaseJ4 {
private void printTimingStats(Timer timer) {
Snapshot snapshot = timer.getSnapshot();
log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate());
log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate());
log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate());
log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean()));
log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian()));
log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile()));
log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile()));
log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile()));
log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile()));
if (log.isInfoEnabled()) {
log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate());
log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); // logOk
log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); // logOk
log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); // logOk
log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); // logOk
log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); // logOk
log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); // logOk
log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); // logOk
log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); // logOk
}
}
private static long nsToMs(double ns) {


@ -60,7 +60,9 @@ public class ReplaceNodeNoTargetTest extends SolrCloudTestCase {
@LuceneTestCase.AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-11067")
public void test() throws Exception {
String coll = "replacenodetest_coll_notarget";
log.info("total_jettys: " + cluster.getJettySolrRunners().size());
if (log.isInfoEnabled()) {
log.info("total_jettys: {}", cluster.getJettySolrRunners().size());
}
CloudSolrClient cloudClient = cluster.getSolrClient();
Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
@ -79,11 +81,13 @@ public class ReplaceNodeNoTargetTest extends SolrCloudTestCase {
cloudClient.request(create);
cluster.waitForActiveCollection(coll, 5, 10);
log.info("Current core status list for node we plan to decommision: {} => {}",
node2bdecommissioned,
getCoreStatusForNamedNode(cloudClient, node2bdecommissioned).getCoreStatus());
log.info("Decommisioning node: " + node2bdecommissioned);
if (log.isInfoEnabled()) {
log.info("Current core status list for node we plan to decommision: {} => {}",
node2bdecommissioned,
getCoreStatusForNamedNode(cloudClient, node2bdecommissioned).getCoreStatus());
log.info("Decommisioning node: {}", node2bdecommissioned);
}
createReplaceNodeRequest(node2bdecommissioned, null, null).processAsync("001", cloudClient);
CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("001");
boolean success = false;


@ -60,7 +60,9 @@ public class ReplaceNodeTest extends SolrCloudTestCase {
@Test
public void test() throws Exception {
String coll = "replacenodetest_coll";
log.info("total_jettys: " + cluster.getJettySolrRunners().size());
if (log.isInfoEnabled()) {
log.info("total_jettys: {}", cluster.getJettySolrRunners().size());
}
CloudSolrClient cloudClient = cluster.getSolrClient();
Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
@ -89,7 +91,7 @@ public class ReplaceNodeTest extends SolrCloudTestCase {
cluster.waitForActiveCollection(coll, 5, 5 * (create.getNumNrtReplicas() + create.getNumPullReplicas() + create.getNumTlogReplicas()));
DocCollection collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll);
log.debug("### Before decommission: " + collection);
log.debug("### Before decommission: {}", collection);
log.info("excluded_node : {} ", emptyNode);
createReplaceNodeRequest(node2bdecommissioned, emptyNode, null).processAsync("000", cloudClient);
CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("000");
@ -111,13 +113,13 @@ public class ReplaceNodeTest extends SolrCloudTestCase {
Thread.sleep(5000);
collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll);
log.debug("### After decommission: " + collection);
log.debug("### After decommission: {}", collection);
// check what are replica states on the decommissioned node
List<Replica> replicas = collection.getReplicas(node2bdecommissioned);
if (replicas == null) {
replicas = Collections.emptyList();
}
log.debug("### Existing replicas on decommissioned node: " + replicas);
log.debug("### Existing replicas on decommissioned node: {}", replicas);
//let's do it back - this time wait for recoveries
CollectionAdminRequest.AsyncCollectionAdminRequest replaceNodeRequest = createReplaceNodeRequest(emptyNode, node2bdecommissioned, Boolean.TRUE);


@ -94,8 +94,10 @@ public class ReplicationFactorTest extends AbstractFullDistribZkTestBase {
testRf2NotUsingDirectUpdates();
waitForThingsToLevelOut(30, TimeUnit.SECONDS);
log.info("replication factor testing complete! final clusterState is: "+
cloudClient.getZkStateReader().getClusterState());
if (log.isInfoEnabled()) {
log.info("replication factor testing complete! final clusterState is: {}",
cloudClient.getZkStateReader().getClusterState());
}
}
protected void testRf2NotUsingDirectUpdates() throws Exception {


@ -83,7 +83,7 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase {
boolean sawLiveDesignate = false;
int numRestarts = 1 + random().nextInt(TEST_NIGHTLY ? 12 : 2);
for (int i = 0; i < numRestarts; i++) {
log.info("Rolling restart #{}", i + 1);
log.info("Rolling restart #{}", i + 1); // logOk
for (CloudJettyRunner cloudJetty : designateJettys) {
log.info("Restarting {}", cloudJetty);
chaosMonkey.stopJetty(cloudJetty);
@ -95,8 +95,8 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase {
if (!success) {
leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient());
if (leader == null)
log.error("NOOVERSEER election queue is :" +
OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(),
log.error("NOOVERSEER election queue is : {}"
, OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(),
"/overseer_elect/election"));
fail("No overseer designate as leader found after restart #" + (i + 1) + ": " + leader);
}
@ -106,8 +106,8 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase {
if (!success) {
leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient());
if (leader == null)
log.error("NOOVERSEER election queue is :" +
OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(),
log.error("NOOVERSEER election queue is :{}"
, OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(),
"/overseer_elect/election"));
fail("No overseer leader found after restart #" + (i + 1) + ": " + leader);
}

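Notice that these error-level calls are parameterized but not wrapped in a guard, even though getSortedElectionNodes() is expensive; the working rule in this commit appears to be that warn and error are rarely disabled, so deferring message construction with placeholders is enough at those levels, while info/debug/trace calls with expensive arguments also get an explicit level check. A sketch of the distinction, with zkClient and electionPath as illustrative stand-ins:

    // error: placeholder only; the level is effectively always on.
    log.error("NOOVERSEER election queue is : {}", getSortedElectionNodes(zkClient, electionPath));

    // debug: guard as well, since DEBUG is usually off and the argument does real work.
    if (log.isDebugEnabled()) {
      log.debug("###### {}", getCollectionState(coll));
    }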

@ -69,11 +69,13 @@ public class SaslZkACLProviderTest extends SolrTestCaseJ4 {
@Override
public void setUp() throws Exception {
super.setUp();
log.info("####SETUP_START " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_START {}", getTestName());
}
createTempDir();
Path zkDir = createTempDir().resolve("zookeeper/server1/data");
log.info("ZooKeeper dataDir:" + zkDir);
log.info("ZooKeeper dataDir:{}", zkDir);
zkServer = new SaslZkTestServer(zkDir, createTempDir().resolve("miniKdc"));
zkServer.run();
@ -84,7 +86,9 @@ public class SaslZkACLProviderTest extends SolrTestCaseJ4 {
}
setupZNodes();
log.info("####SETUP_END " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_END {}", getTestName());
}
}
protected void setupZNodes() throws Exception {


@ -98,7 +98,7 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
SolrCLI.CreateCollectionTool tool = new SolrCLI.CreateCollectionTool();
CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
log.info("Creating the '"+testCollectionName+"' collection using SolrCLI with: "+solrUrl);
log.info("Creating the '{}' collection using SolrCLI with: {}", testCollectionName, solrUrl);
tool.runTool(cli);
assertTrue("Collection '" + testCollectionName + "' doesn't exist after trying to create it!",
cloudClient.getZkStateReader().getClusterState().hasCollection(testCollectionName));
@ -140,7 +140,9 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
expectedXmlFileCount, xmlFiles.size());
for (File xml : xmlFiles) {
log.info("POSTing "+xml.getAbsolutePath());
if (log.isInfoEnabled()) {
log.info("POSTing {}", xml.getAbsolutePath());
}
cloudClient.request(new StreamingUpdateRequest("/update",xml,"application/xml"));
}
cloudClient.commit();
@ -156,14 +158,14 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
}
assertEquals("*:* found unexpected number of documents", expectedXmlDocCount, numFound);
log.info("Updating Config for " + testCollectionName);
log.info("Updating Config for {}", testCollectionName);
doTestConfigUpdate(testCollectionName, solrUrl);
log.info("Running healthcheck for " + testCollectionName);
log.info("Running healthcheck for {}", testCollectionName);
doTestHealthcheck(testCollectionName, cloudClient.getZkHost());
// verify the delete action works too
log.info("Running delete for "+testCollectionName);
log.info("Running delete for {}", testCollectionName);
doTestDeleteAction(testCollectionName, solrUrl);
log.info("testLoadDocsIntoGettingStartedCollection succeeded ... shutting down now!");
@ -218,7 +220,7 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
SolrCLI.ConfigTool tool = new SolrCLI.ConfigTool();
CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
log.info("Sending set-property '" + prop + "'=" + maxTime + " to SolrCLI.ConfigTool.");
log.info("Sending set-property '{}'={} to SolrCLI.ConfigTool.", prop, maxTime);
assertTrue("Set config property failed!", tool.runTool(cli) == 0);
configJson = SolrCLI.getJson(configUrl);
@ -234,7 +236,9 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
assertEquals("Should have been able to get a value from the /query request handler",
"explicit", SolrCLI.atPath("/config/requestHandler/\\/query/defaults/echoParams", configJson));
log.info("live_nodes_count : " + cloudClient.getZkStateReader().getClusterState().getLiveNodes());
if (log.isInfoEnabled()) {
log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes());
}
// Since it takes some time for this command to complete we need to make sure all the reloads for
// all the cores have been done.


@ -81,7 +81,9 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 {
zkClient.close();
log.info("####SETUP_START " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_START {}", getTestName());
}
// set some system properties for use by tests
Properties props = new Properties();
@ -89,7 +91,9 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 {
props.setProperty("solr.test.sys.prop2", "proptwo");
cfg = SolrDispatchFilter.loadNodeConfig(solrHome, props);
log.info("####SETUP_END " + getTestName());
if (log.isInfoEnabled()) {
log.info("####SETUP_END {}", getTestName());
}
}
private void closeZK() throws Exception {


@ -181,7 +181,7 @@ public class SplitShardTest extends SolrCloudTestCase {
long numFound = 0;
try {
numFound = replicaClient.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound();
log.info("Replica count=" + numFound + " for " + replica);
log.info("Replica count={} for {}", numFound, replica);
} finally {
replicaClient.close();
}
@ -269,11 +269,11 @@ public class SplitShardTest extends SolrCloudTestCase {
String id = (String) doc.get("id");
leftover.remove(id);
}
log.error("MISSING DOCUMENTS: " + leftover);
log.error("MISSING DOCUMENTS: {}", leftover);
}
assertEquals("Documents are missing!", docsIndexed.get(), numDocs);
log.info("Number of documents indexed and queried : " + numDocs);
log.info("Number of documents indexed and queried : {}", numDocs);
}


@ -130,7 +130,7 @@ public class SystemCollectionCompatTest extends SolrCloudTestCase {
long currentTime = getCoreStatus(r).getCoreStartTime().getTime();
allReloaded = allReloaded && (previousTime < currentTime);
} catch (Exception e) {
log.warn("Error retrieving replica status of " + Utils.toJSONString(r), e);
log.warn("Error retrieving replica status of {}", Utils.toJSONString(r), e);
allReloaded = false;
}
}
@ -185,7 +185,9 @@ public class SystemCollectionCompatTest extends SolrCloudTestCase {
watcher.reset();
// restart Overseer to trigger the back-compat check
log.info("Stopping Overseer Node: {} ({})", overseerNode.getNodeName(), overseerNode.getLocalPort());
if (log.isInfoEnabled()) {
log.info("Stopping Overseer Node: {} ({})", overseerNode.getNodeName(), overseerNode.getLocalPort());
}
cluster.stopJettySolrRunner(overseerNode);
log.info("Waiting for new overseer election...");
TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, cloudManager.getTimeSource());
@ -217,11 +219,15 @@ public class SystemCollectionCompatTest extends SolrCloudTestCase {
continue;
}
if (doc.getFieldValue("message").toString().contains("re-indexing")) {
log.info("Found re-indexing message: {}", doc.getFieldValue("message"));
if (log.isInfoEnabled()) {
log.info("Found re-indexing message: {}", doc.getFieldValue("message"));
}
foundWarning = true;
}
if (doc.getFieldValue("message").toString().contains("timestamp")) {
log.info("Found timestamp message: {}", doc.getFieldValue("message"));
if (log.isInfoEnabled()) {
log.info("Found timestamp message: {}", doc.getFieldValue("message"));
}
foundSchemaWarning = true;
}
}


@ -149,7 +149,7 @@ public class TestAuthenticationFramework extends SolrCloudTestCase {
String username = httpRequest.getHeader("username");
String password = httpRequest.getHeader("password");
log.info("Username: "+username+", password: "+password);
log.info("Username: {}, password: {}", username, password);
if(MockAuthenticationPlugin.expectedUsername.equals(username) && MockAuthenticationPlugin.expectedPassword.equals(password)) {
filterChain.doFilter(request, response);
return true;


@ -69,7 +69,9 @@ public class TestCloudConsistency extends SolrCloudTestCase {
cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart
cluster.startJettySolrRunner(jetty);
proxy.open(jetty.getBaseUrl().toURI());
log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
if (log.isInfoEnabled()) {
log.info("Adding proxy for URL: {}. Proxy: {}", jetty.getBaseUrl(), proxy.getUrl());
}
proxies.put(jetty, proxy);
jettys.put(proxy.getUrl(), jetty);
}


@ -362,7 +362,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase {
// no stats for this pivot, nothing to check
// TODO: use a trace param to know if/how-many to expect ?
log.info("No stats to check for => " + message);
log.info("No stats to check for => {}", message);
return;
}
@ -380,7 +380,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase {
// StatsComponent results being "null" (and not even included in the
// getFieldStatsInfo() Map due to specila SolrJ logic)
log.info("Requested stats missing in verification query, pivot stats: " + pivotStats);
log.info("Requested stats missing in verification query, pivot stats: {}", pivotStats);
assertEquals("Special Count", 0L, pivotStats.getCount().longValue());
assertEquals("Special Missing",
constraint.getCount(), pivotStats.getMissing().longValue());


@ -259,7 +259,9 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase {
log.info("Active replica: {}", coreNodeName);
for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
JettySolrRunner jettySolrRunner = cluster.getJettySolrRunner(i);
log.info("Checking node: {}", jettySolrRunner.getNodeName());
if (log.isInfoEnabled()) {
log.info("Checking node: {}", jettySolrRunner.getNodeName());
}
if (jettySolrRunner.getNodeName().equals(replica.getNodeName())) {
SolrDispatchFilter solrDispatchFilter = jettySolrRunner.getSolrDispatchFilter();
try (SolrCore core = solrDispatchFilter.getCores().getCore(coreName)) {
@ -268,7 +270,9 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase {
assert false;
return false;
}
log.info("Found SolrCore: {}, id: {}", core.getName(), core);
if (log.isInfoEnabled()) {
log.info("Found SolrCore: {}, id: {}", core.getName(), core);
}
RefCounted<SolrIndexSearcher> registeredSearcher = core.getRegisteredSearcher();
if (registeredSearcher != null) {
log.error("registered searcher not null, maxdocs = {}", registeredSearcher.get().maxDoc());
@ -326,13 +330,19 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase {
if (sleepTime.get() > 0) {
TestCloudSearcherWarming.coreNodeNameRef.set(newSearcher.getCore().getCoreDescriptor().getCloudDescriptor().getCoreNodeName());
TestCloudSearcherWarming.coreNameRef.set(newSearcher.getCore().getName());
log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
if (log.isInfoEnabled()) {
log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}"
, sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
}
try {
Thread.sleep(sleepTime.get());
} catch (InterruptedException e) {
log.warn("newSearcher was interupdated", e);
}
log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
if (log.isInfoEnabled()) {
log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}"
, sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore());
}
}
}
}


@ -500,10 +500,14 @@ public class TestConfigSetsAPI extends SolrTestCaseJ4 {
File.separator + TestUtil.randomSimpleString(random(), 6, 8) + ".zip");
File directory = TestDynamicLoading.getFile(directoryPath);
log.info("Directory: "+directory.getAbsolutePath());
if (log.isInfoEnabled()) {
log.info("Directory: {}", directory.getAbsolutePath());
}
try {
zip (directory, zipFile);
log.info("Zipfile: "+zipFile.getAbsolutePath());
if (log.isInfoEnabled()) {
log.info("Zipfile: {}", zipFile.getAbsolutePath());
}
return zipFile.getAbsolutePath();
} catch (IOException e) {
throw new RuntimeException(e);


@ -64,7 +64,7 @@ public class TestDynamicFieldNamesIndexCorrectly extends AbstractFullDistribZkTe
void populateIndex(int numRuns) throws IOException, SolrServerException {
try {
for (int i = 0; i < numRuns; i++) {
log.debug("Iteration number: " + i);
log.debug("Iteration number: {}", i);
cloudClient.deleteByQuery(COLLECTION, "*:*");
cloudClient.commit(COLLECTION);
@ -74,7 +74,7 @@ public class TestDynamicFieldNamesIndexCorrectly extends AbstractFullDistribZkTe
final SolrQuery solrQuery = new SolrQuery("*:*");
solrQuery.setRows(solrDocs.size());
final SolrDocumentList resultDocs = getSolrResponse(solrQuery, COLLECTION);
log.debug(resultDocs.toString());
log.debug("{}", resultDocs);
assertThatDocsHaveCorrectFields(solrDocs, resultDocs);
}
} finally {

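The change from log.debug(resultDocs.toString()) to log.debug("{}", resultDocs) above is worth calling out: passing the object itself through a placeholder means toString() is invoked by the logger only if the message is actually emitted, instead of unconditionally at the call site:

    // Always builds the (potentially large) string, even with DEBUG disabled.
    log.debug(resultDocs.toString());

    // toString() runs only when DEBUG is enabled and the message is emitted.
    log.debug("{}", resultDocs);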

@ -77,7 +77,9 @@ public class TestLeaderElectionZkExpiry extends SolrTestCaseJ4 {
try {
String leaderNode = OverseerCollectionConfigSetProcessor.getLeaderNode(zc);
if (leaderNode != null && !leaderNode.trim().isEmpty()) {
log.info("Time={} Overseer leader is = {}", System.nanoTime(), leaderNode);
if (log.isInfoEnabled()) {
log.info("Time={} Overseer leader is = {}", System.nanoTime(), leaderNode);
}
found = true;
break;
}


@ -334,13 +334,17 @@ public class TestPullReplica extends SolrCloudTestCase {
return false;
}
statesSeen.add(r.getState());
log.info("CollectionStateWatcher saw state: {}", r.getState());
if (log.isInfoEnabled()) {
log.info("CollectionStateWatcher saw state: {}", r.getState());
}
return r.getState() == Replica.State.ACTIVE;
});
CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.PULL).process(cluster.getSolrClient());
waitForState("Replica not added", collectionName, activeReplicaCount(1, 0, 1));
zkClient().printLayoutToStream(System.out);
log.info("Saw states: " + Arrays.toString(statesSeen.toArray()));
if (log.isInfoEnabled()) {
log.info("Saw states: {}", Arrays.toString(statesSeen.toArray()));
}
assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), 3, statesSeen.size());
assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.DOWN, statesSeen.get(0));
assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.RECOVERING, statesSeen.get(0));
