Merge pull request elastic/elasticsearch#1674 from ywelsch/enhance/fix-logger-usages

Fix wrong placeholder usage in logging statements

Original commit: elastic/x-pack-elasticsearch@101f043fac
This commit is contained in:
Yannick Welsch 2016-03-11 10:30:28 +01:00
commit 0abe314bb6
34 changed files with 65 additions and 55 deletions

View File

@@ -174,9 +174,9 @@ public class LicensesService extends AbstractLifecycleComponent<LicensesService>
}
}
}
logger.error(builder.toString());
logger.error("{}", builder);
} else {
logger.error(general);
logger.error("{}", general);
}
}
}
@@ -206,9 +206,9 @@ public class LicensesService extends AbstractLifecycleComponent<LicensesService>
}
}
}
logger.error(builder.toString());
logger.error("{}", builder.toString());
} else {
logger.error(general);
logger.error("{}", general);
}
}
}
@@ -238,9 +238,9 @@ public class LicensesService extends AbstractLifecycleComponent<LicensesService>
}
}
}
logger.error(builder.toString());
logger.error("{}", builder.toString());
} else {
logger.error(general);
logger.error("{}", general);
}
}
}
@@ -400,7 +400,7 @@ public class LicensesService extends AbstractLifecycleComponent<LicensesService>
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
LicensesMetaData licensesMetaData = newState.metaData().custom(LicensesMetaData.TYPE);
if (logger.isDebugEnabled()) {
logger.debug("registered trial license", licensesMetaData);
logger.debug("registered trial license: {}", licensesMetaData);
}
}

View File

@@ -66,7 +66,7 @@ public abstract class AbstractLicensesConsumerPluginIntegrationTestCase extends
}
public void testTrialLicenseAndSignedLicenseNotification() throws Exception {
logger.info("using " + consumerPlugin.getClass().getName() + " consumer plugin");
logger.info("using {} consumer plugin", consumerPlugin.getClass().getName());
logger.info(" --> trial license generated");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(consumerPlugin.id(), LicenseState.ENABLED);

View File

@@ -62,7 +62,7 @@ public class LicensesPluginIntegrationTests extends AbstractLicensesIntegrationT
}
public void testTrialLicenseAndSignedLicenseNotification() throws Exception {
logger.info("using " + ((useEagerLicenseRegistrationPlugin) ? "eager" : "lazy") + " consumer plugin");
logger.info("using {} consumer plugin", useEagerLicenseRegistrationPlugin ? "eager" : "lazy");
logger.info(" --> trial license generated");
// managerService should report feature to be enabled on all data nodes
assertLicenseeState(getCurrentFeatureName(), LicenseState.ENABLED);

View File

@@ -80,7 +80,7 @@ public class LicensesServiceClusterTests extends AbstractLicensesIntegrationTest
wipeAllLicenses();
int numNodes = randomIntBetween(1, 5);
logger.info("--> starting " + numNodes + " node(s)");
logger.info("--> starting {} node(s)", numNodes);
for (int i = 0; i < numNodes; i++) {
internalCluster().startNode();
}

View File

@@ -80,7 +80,7 @@ public abstract class AbstractCollector<T> extends AbstractLifecycleComponent<T>
return doCollect();
}
} catch (ElasticsearchTimeoutException e) {
logger.error("collector [{}] timed out when collecting data");
logger.error("collector [{}] timed out when collecting data", name());
} catch (Exception e) {
logger.error("collector [{}] - failed collecting data", e, name());
}

View File

@@ -271,7 +271,7 @@ public class HttpExporter extends Exporter {
try {
Version remoteVersion = loadRemoteClusterVersion(host);
if (remoteVersion == null) {
logger.warn("unable to check remote cluster version: no version found on host [" + host + "]");
logger.warn("unable to check remote cluster version: no version found on host [{}]", host);
continue;
}
supportedClusterVersion = remoteVersion.onOrAfter(MIN_SUPPORTED_CLUSTER_VERSION);
@@ -361,20 +361,19 @@ public class HttpExporter extends Exporter {
return conn;
} catch (URISyntaxException e) {
logErrorBasedOnLevel(e, "error parsing host [{}]", host);
logger.error("error parsing host [{}] [{}]", host, e.getMessage());
if (logger.isDebugEnabled()) {
logger.debug("error parsing host [{}]. full error details:\n[{}]", host, ExceptionsHelper.detailedMessage(e));
}
} catch (IOException e) {
logErrorBasedOnLevel(e, "error connecting to [{}]", host);
logger.error("error connecting to [{}] [{}]", host, e.getMessage());
if (logger.isDebugEnabled()) {
logger.debug("error connecting to [{}]. full error details:\n[{}]", host, ExceptionsHelper.detailedMessage(e));
}
}
return null;
}
private void logErrorBasedOnLevel(Throwable t, String msg, Object... params) {
logger.error(msg + " [" + t.getMessage() + "]", params);
if (logger.isDebugEnabled()) {
logger.debug(msg + ". full error details:\n[{}]", params, ExceptionsHelper.detailedMessage(t));
}
}
/**
* Get the version of the remote monitoring cluster
*/

View File

@@ -74,7 +74,7 @@ public class IndexRecoveryCollectorTests extends AbstractCollectorTestCase {
client().prepareIndex(indexName, "foo").setSource("value", randomInt()).get();
}
logger.info("--> create a second index [other] that won't be part of stats collection", indexName, node1);
logger.info("--> create a second index [{}] on node [{}] that won't be part of stats collection", indexName, node1);
client().prepareIndex("other", "bar").setSource("value", randomInt()).get();
flushAndRefresh();

View File

@@ -43,7 +43,7 @@ public class TransportDeleteRoleAction extends HandledTransportAction<DeleteRole
}
});
} catch (Exception e) {
logger.error("failed to delete role [{}]", e);
logger.error("failed to delete role [{}]", e, request.name());
listener.onFailure(e);
}
}

View File

@@ -43,7 +43,7 @@ public class TransportDeleteUserAction extends HandledTransportAction<DeleteUser
}
});
} catch (Exception e) {
logger.error("failed to delete user [{}]", e);
logger.error("failed to delete user [{}]", e, request.username());
listener.onFailure(e);
}
}

View File

@@ -268,7 +268,7 @@ public class InternalAuthenticationService extends AbstractComponent implements
if (logger.isDebugEnabled()) {
logger.debug("failed to extract token from transport message", e);
} else {
logger.warn("failed to extract token from transport message: ", e.getMessage());
logger.warn("failed to extract token from transport message: {}", e.getMessage());
}
auditTrail.authenticationFailed(action, message);
throw failureHandler.exceptionProcessingRequest(message, e);

View File

@@ -120,9 +120,9 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
} catch (Exception ee) {
if (logger.isTraceEnabled()) {
logger.trace("realm [" + type() + "] could not authenticate [" + token.principal() + "]", ee);
logger.trace("realm [{}] could not authenticate [{}]", ee, type(), token.principal());
} else if (logger.isDebugEnabled()) {
logger.debug("realm [" + type() + "] could not authenticate [" + token.principal() + "]");
logger.debug("realm [{}] could not authenticate [{}]", type(), token.principal());
}
return null;
}
@@ -150,9 +150,9 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
return userWithHash.user;
} catch (ExecutionException ee) {
if (logger.isTraceEnabled()) {
logger.trace("realm [" + name() + "] could not lookup [" + username + "]", ee);
logger.trace("realm [{}] could not lookup [{}]", ee, name(), username);
} else if (logger.isDebugEnabled()) {
logger.debug("realm [" + name() + "] could not authenticate [" + username + "]");
logger.debug("realm [{}] could not authenticate [{}]", name(), username);
}
return null;
}

View File

@@ -175,8 +175,7 @@ public class DnRoleMapper {
}
if (logger.isDebugEnabled()) {
logger.debug("the roles [{}], are mapped from the user [{}] for realm [{}/{}]",
(rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, realmType, userDnString,
realmType, config.name());
(rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, userDnString, realmType, config.name());
}
return roles;
}

View File

@@ -63,7 +63,7 @@ public class ShieldPluginEnabledDisabledTests extends ShieldIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
logger.info("******* shield is " + (enabled ? "enabled" : "disabled"));
logger.info("******* shield is {}", enabled ? "enabled" : "disabled");
return Settings.settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
.put(XPackPlugin.featureEnabledSetting(Shield.NAME), enabled)

View File

@@ -5,6 +5,7 @@
*/
package org.elasticsearch.shield.authc.esusers;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
@@ -179,6 +180,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {
assertThat(new String(users.get("sha")), equalTo("{SHA}cojt0Pw//L6ToM8G41aOKFIWh7w="));
}
@SuppressLoggerChecks(reason = "mock usage")
public void testParseFile_Empty() throws Exception {
Path empty = createTempFile();
ESLogger log = ESLoggerFactory.getLogger("test");

View File

@@ -6,6 +6,7 @@
package org.elasticsearch.shield.authc.esusers;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
@@ -187,6 +188,7 @@ public class FileUserRolesStoreTests extends ESTestCase {
assertThat(usersRoles.get("period.user"), arrayContaining("role4"));
}
@SuppressLoggerChecks(reason = "mock usage")
public void testParseFileEmpty() throws Exception {
Path empty = createTempFile();
ESLogger log = ESLoggerFactory.getLogger("test");

View File

@@ -69,7 +69,7 @@ public class ValidationTests extends ESTestCase {
public void testESUsersValidatePassword() throws Exception {
String passwd = randomAsciiOfLength(randomIntBetween(0, 20));
logger.info(passwd + "[{}]", passwd.length());
logger.info("{}[{}]", passwd, passwd.length());
if (passwd.length() >= 6) {
assertThat(Validation.ESUsers.validatePassword(passwd.toCharArray()), nullValue());
} else {

View File

@@ -156,7 +156,7 @@ public class ServerTransportFilterIntegrationTests extends ShieldIntegTestCase {
" master");
} catch (MasterNotDiscoveredException e) {
// expected
logger.error("expected: " + e);
logger.error("expected exception", e);
}
}
}

View File

@@ -126,7 +126,7 @@ public class Account {
try {
transport.close();
} catch (MessagingException me) {
logger.error("failed to close email transport for account [" + config.name + "]");
logger.error("failed to close email transport for account [{}]", config.name);
}
if (contextClassLoader != null) {
setContextClassLoader(contextClassLoader);

View File

@@ -104,8 +104,8 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpReq
httpRequest.host(), httpRequest.port(), httpRequest.method(), httpRequest.path(), response.status());
}
} catch (IOException e) {
logger.error("Error executing HTTP request: [host[{}], port[{}], method[{}], path[{}]: [{}]", e, httpRequest.port(),
httpRequest.method(), httpRequest.path(), e.getMessage());
logger.error("Error executing HTTP request: [host[{}], port[{}], method[{}], path[{}]: [{}]", e, httpRequest.host(),
httpRequest.port(), httpRequest.method(), httpRequest.path(), e.getMessage());
}
throw new ElasticsearchException("Unable to get attachment of type [{}] with id [{}] in watch [{}] aborting watch execution",

View File

@@ -5,6 +5,7 @@
*/
package org.elasticsearch.watcher.actions.logging;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -19,30 +20,35 @@ public enum LoggingLevel implements ToXContent {
ERROR() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
logger.error(text);
}
},
WARN() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
logger.warn(text);
}
},
INFO() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
logger.info(text);
}
},
DEBUG() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
logger.debug(text);
}
},
TRACE() {
@Override
@SuppressLoggerChecks(reason = "logger delegation")
void log(ESLogger logger, String text) {
logger.trace(text);
}

View File

@@ -258,7 +258,7 @@ public class ExecutionService extends AbstractComponent {
if (ctx.knownWatch() && watchStore.get(ctx.watch().id()) == null) {
// fail fast if we are trying to execute a deleted watch
String message = "unable to find watch for record [" + ctx.id() + "], perhaps it has been deleted, ignoring...";
logger.warn(message);
logger.warn("{}", message);
record = ctx.abortBeforeExecution(ExecutionState.NOT_EXECUTED_WATCH_MISSING, message);
} else {
@@ -332,7 +332,7 @@
executor.execute(new WatchExecutionTask(ctx));
} catch (EsRejectedExecutionException e) {
String message = "failed to run triggered watch [" + triggeredWatch.id() + "] due to thread pool capacity";
logger.debug(message);
logger.debug("{}", message);
WatchRecord record = ctx.abortBeforeExecution(ExecutionState.FAILED, message);
if (ctx.overrideRecordOnConflict()) {
historyStore.forcePut(record);

View File

@@ -155,7 +155,7 @@ public class WatcherIndexTemplateRegistry extends AbstractComponent implements C
public void run() {
try (InputStream is = WatchStore.class.getResourceAsStream("/" + config.getFileName()+ ".json")) {
if (is == null) {
logger.error("Resource [/" + config.getFileName() + ".json] not found in classpath");
logger.error("Resource [/{}.json] not found in classpath", config.getFileName());
return;
}
final byte[] template;

View File

@@ -390,7 +390,7 @@ public class EmailActionTests extends ESTestCase {
XContentBuilder builder = jsonBuilder();
executable.toXContent(builder, params);
BytesReference bytes = builder.bytes();
logger.info(bytes.toUtf8());
logger.info("{}", bytes.toUtf8());
XContentParser parser = JsonXContent.jsonXContent.createParser(bytes);
parser.nextToken();

View File

@@ -99,7 +99,7 @@ public class EmailAttachmentParsersTests extends ESTestCase {
EmailAttachments emailAttachments = new EmailAttachments(attachments);
XContentBuilder builder = jsonBuilder();
emailAttachments.toXContent(builder, ToXContent.EMPTY_PARAMS);
logger.info("JSON is: " + builder.string());
logger.info("JSON is: {}", builder.string());
assertThat(builder.string(), containsString("my-name.json"));
assertThat(builder.string(), containsString("json"));
assertThat(builder.string(), containsString("other-id"));

View File

@@ -186,7 +186,7 @@ public class HipChatActionFactoryTests extends ESTestCase {
XContentBuilder jsonBuilder = jsonBuilder();
action.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS);
BytesReference bytes = builder.bytes();
logger.info(bytes.toUtf8());
logger.info("{}", bytes.toUtf8());
XContentParser parser = JsonXContent.jsonXContent.createParser(bytes);
parser.nextToken();

View File

@@ -236,7 +236,7 @@ public class HipChatActionTests extends ESTestCase {
XContentBuilder jsonBuilder = jsonBuilder();
action.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS);
BytesReference bytes = builder.bytes();
logger.info(bytes.toUtf8());
logger.info("{}", bytes.toUtf8());
XContentParser parser = JsonXContent.jsonXContent.createParser(bytes);
parser.nextToken();

View File

@@ -149,7 +149,7 @@ public class UserAccountTests extends ESTestCase {
}))
.build();
logger.info("expected (r1): " + jsonBuilder().value(reqR1).bytes().toUtf8());
logger.info("expected (r1): {}", jsonBuilder().value(reqR1).bytes().toUtf8());
HttpResponse resR1 = mock(HttpResponse.class);
when(resR1.status()).thenReturn(200);
@@ -179,7 +179,7 @@
}))
.build();
logger.info("expected (r2): " + jsonBuilder().value(reqR1).bytes().toUtf8());
logger.info("expected (r2): {}", jsonBuilder().value(reqR1).bytes().toUtf8());
HttpResponse resR2 = mock(HttpResponse.class);
when(resR2.status()).thenReturn(200);
@@ -206,7 +206,7 @@
}))
.build();
logger.info("expected (u1): " + jsonBuilder().value(reqU1).bytes().toUtf8());
logger.info("expected (u1): {}", jsonBuilder().value(reqU1).bytes().toUtf8());
HttpResponse resU1 = mock(HttpResponse.class);
when(resU1.status()).thenReturn(200);
@@ -233,7 +233,7 @@
}))
.build();
logger.info("expected (u2): " + jsonBuilder().value(reqU2).bytes().toUtf8());
logger.info("expected (u2): {}", jsonBuilder().value(reqU2).bytes().toUtf8());
HttpResponse resU2 = mock(HttpResponse.class);
when(resU2.status()).thenReturn(200);

View File

@@ -132,7 +132,7 @@ public class V1AccountTests extends ESTestCase {
.toString())
.build();
logger.info("expected (r1): " + jsonBuilder().value(req1).bytes().toUtf8());
logger.info("expected (r1): {}", jsonBuilder().value(req1).bytes().toUtf8());
HttpResponse res1 = mock(HttpResponse.class);
when(res1.status()).thenReturn(200);
@@ -155,7 +155,7 @@
.toString())
.build();
logger.info("expected (r2): " + jsonBuilder().value(req2).bytes().toUtf8());
logger.info("expected (r2): {}", jsonBuilder().value(req2).bytes().toUtf8());
HttpResponse res2 = mock(HttpResponse.class);
when(res2.status()).thenReturn(200);

View File

@@ -6,6 +6,7 @@
package org.elasticsearch.watcher.actions.logging;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -190,6 +191,7 @@ public class LoggingActionTests extends ESTestCase {
}
}
@SuppressLoggerChecks(reason = "mock usage")
static void verifyLogger(ESLogger logger, LoggingLevel level, String text) {
switch (level) {
case ERROR:

View File

@@ -177,7 +177,7 @@ public class SlackActionTests extends ESTestCase {
XContentBuilder builder = jsonBuilder();
action.toXContent(builder, ToXContent.EMPTY_PARAMS);
BytesReference bytes = builder.bytes();
logger.info(bytes.toUtf8());
logger.info("{}", bytes.toUtf8());
XContentParser parser = JsonXContent.jsonXContent.createParser(bytes);
parser.nextToken();

View File

@@ -126,7 +126,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase
@Override
protected Settings nodeSettings(int nodeOrdinal) {
String scheduleImplName = scheduleEngine().name().toLowerCase(Locale.ROOT);
logger.info("using schedule engine [" + scheduleImplName + "]");
logger.info("using schedule engine [{}]", scheduleImplName);
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
//TODO: for now lets isolate watcher tests from monitoring (randomize this later)

View File

@@ -82,7 +82,7 @@ public class ScheduleTriggerEngineMock extends ScheduleTriggerEngine {
public void trigger(String jobName, int times, TimeValue interval) {
for (int i = 0; i < times; i++) {
DateTime now = clock.now(DateTimeZone.UTC);
logger.debug("firing [" + jobName + "] at [" + now + "]");
logger.debug("firing [{}] at [{}]", jobName, now);
ScheduleTriggerEvent event = new ScheduleTriggerEvent(jobName, now, now);
for (Listener listener : listeners) {
listener.triggered(Arrays.<TriggerEvent>asList(event));

View File

@@ -46,7 +46,7 @@ public class YearlyScheduleTests extends ScheduleTestCase {
dayStr = dayStr.replace("32", "L");
String monthStr = Strings.collectionToCommaDelimitedString(time.months());
String expression = "0 " + minStr + " " + hrStr + " " + dayStr + " " + monthStr + " ?";
logger.info("expression: " + expression);
logger.info("expression: {}", expression);
assertThat(crons, hasItemInArray(expression));
}
}

View File

@@ -203,7 +203,7 @@ public class WatchTests extends ESTestCase {
Watch watch = new Watch("_name", trigger, input, condition, transform, throttlePeriod, actions, metadata, watchStatus);
BytesReference bytes = XContentFactory.jsonBuilder().value(watch).bytes();
logger.info(bytes.toUtf8());
logger.info("{}", bytes.toUtf8());
Watch.Parser watchParser = new Watch.Parser(settings, conditionRegistry, triggerService, transformRegistry, actionRegistry,
inputRegistry, secretService, clock);