YARN-6916. Moving logging APIs over to slf4j in hadoop-yarn-server-common. Contributed by Bibin A Chundatt and Akira Ajisaka.

(cherry picked from commit 4a87773718)
This commit is contained in:
Akira Ajisaka 2017-10-04 06:06:36 +09:00
parent d2c3b93ee5
commit 2fa1d39952
No known key found for this signature in database
GPG Key ID: C1EDBB9CA400FD50
10 changed files with 60 additions and 75 deletions

View File

@@ -21,17 +21,18 @@ package org.apache.hadoop.yarn.server.api;
import java.io.IOException; import java.io.IOException;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.client.RMProxy; import org.apache.hadoop.yarn.client.RMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ServerRMProxy<T> extends RMProxy<T> { public class ServerRMProxy<T> extends RMProxy<T> {
private static final Log LOG = LogFactory.getLog(ServerRMProxy.class); private static final Logger LOG =
LoggerFactory.getLogger(ServerRMProxy.class);
private ServerRMProxy() { private ServerRMProxy() {
super(); super();

View File

@@ -18,8 +18,6 @@
package org.apache.hadoop.yarn.server.scheduler; package org.apache.hadoop.yarn.server.scheduler;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -42,6 +40,8 @@ import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator; import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator; import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources; import org.apache.hadoop.yarn.util.resource.Resources;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.util.ArrayList; import java.util.ArrayList;
@@ -180,8 +180,8 @@ public class OpportunisticContainerAllocator {
} }
} }
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(OpportunisticContainerAllocator.class); LoggerFactory.getLogger(OpportunisticContainerAllocator.class);
private static final ResourceCalculator RESOURCE_CALCULATOR = private static final ResourceCalculator RESOURCE_CALCULATOR =
new DominantResourceCalculator(); new DominantResourceCalculator();
@@ -255,12 +255,11 @@ public class OpportunisticContainerAllocator {
appContext.getContainerIdGenerator(), appContext.getBlacklist(), appContext.getContainerIdGenerator(), appContext.getBlacklist(),
appAttId, appContext.getNodeMap(), userName, containers, anyAsk); appAttId, appContext.getNodeMap(), userName, containers, anyAsk);
if (!containers.isEmpty()) { if (!containers.isEmpty()) {
LOG.info("Opportunistic allocation requested for [" LOG.info("Opportunistic allocation requested for [priority={}, "
+ "priority=" + anyAsk.getPriority() + "allocationRequestId={}, num_containers={}, capability={}] "
+ ", allocationRequestId=" + anyAsk.getAllocationRequestId() + "allocated = {}", anyAsk.getPriority(),
+ ", num_containers=" + anyAsk.getNumContainers() anyAsk.getAllocationRequestId(), anyAsk.getNumContainers(),
+ ", capability=" + anyAsk.getCapability() + "]" anyAsk.getCapability(), containers.keySet());
+ " allocated = " + containers.keySet());
} }
} }
return containers; return containers;
@@ -286,8 +285,7 @@ public class OpportunisticContainerAllocator {
} }
if (nodesForScheduling.isEmpty()) { if (nodesForScheduling.isEmpty()) {
LOG.warn("No nodes available for allocating opportunistic containers. [" + LOG.warn("No nodes available for allocating opportunistic containers. [" +
"allNodes=" + allNodes + ", " + "allNodes={}, blacklist={}]", allNodes, blacklist);
"blacklist=" + blacklist + "]");
return; return;
} }
int numAllocated = 0; int numAllocated = 0;
@@ -305,9 +303,9 @@ public class OpportunisticContainerAllocator {
} }
cList.add(container); cList.add(container);
numAllocated++; numAllocated++;
LOG.info("Allocated [" + container.getId() + "] as opportunistic."); LOG.info("Allocated [{}] as opportunistic.", container.getId());
} }
LOG.info("Allocated " + numAllocated + " opportunistic containers."); LOG.info("Allocated {} opportunistic containers.", numAllocated);
} }
private Container buildContainer(long rmIdentifier, private Container buildContainer(long rmIdentifier,

View File

@@ -23,14 +23,14 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.records.MasterKey; import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* SecretManager for ContainerTokens. Extended by both RM and NM and hence is * SecretManager for ContainerTokens. Extended by both RM and NM and hence is
@@ -40,8 +40,8 @@ import org.apache.hadoop.yarn.server.api.records.MasterKey;
public class BaseContainerTokenSecretManager extends public class BaseContainerTokenSecretManager extends
SecretManager<ContainerTokenIdentifier> { SecretManager<ContainerTokenIdentifier> {
private static Log LOG = LogFactory private static final Logger LOG =
.getLog(BaseContainerTokenSecretManager.class); LoggerFactory.getLogger(BaseContainerTokenSecretManager.class);
protected int serialNo = new SecureRandom().nextInt(); protected int serialNo = new SecureRandom().nextInt();
@@ -86,11 +86,9 @@ public class BaseContainerTokenSecretManager extends
@Override @Override
public byte[] createPassword(ContainerTokenIdentifier identifier) { public byte[] createPassword(ContainerTokenIdentifier identifier) {
if (LOG.isDebugEnabled()) { LOG.debug("Creating password for {} for user {} to be run on NM {}",
LOG.debug("Creating password for " + identifier.getContainerID() identifier.getContainerID(), identifier.getUser(),
+ " for user " + identifier.getUser() + " to be run on NM " identifier.getNmHostAddress());
+ identifier.getNmHostAddress());
}
this.readLock.lock(); this.readLock.lock();
try { try {
return createPassword(identifier.getBytes(), return createPassword(identifier.getBytes(),
@@ -114,11 +112,9 @@ public class BaseContainerTokenSecretManager extends
protected byte[] retrievePasswordInternal(ContainerTokenIdentifier identifier, protected byte[] retrievePasswordInternal(ContainerTokenIdentifier identifier,
MasterKeyData masterKey) MasterKeyData masterKey)
throws org.apache.hadoop.security.token.SecretManager.InvalidToken { throws org.apache.hadoop.security.token.SecretManager.InvalidToken {
if (LOG.isDebugEnabled()) { LOG.debug("Retrieving password for {} for user {} to be run on NM {}",
LOG.debug("Retrieving password for " + identifier.getContainerID() identifier.getContainerID(), identifier.getUser(),
+ " for user " + identifier.getUser() + " to be run on NM " identifier.getNmHostAddress());
+ identifier.getNmHostAddress());
}
return createPassword(identifier.getBytes(), masterKey.getSecretKey()); return createPassword(identifier.getBytes(), masterKey.getSecretKey());
} }

View File

@@ -24,8 +24,6 @@ import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
@@ -35,12 +33,14 @@ import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.security.NMTokenIdentifier; import org.apache.hadoop.yarn.security.NMTokenIdentifier;
import org.apache.hadoop.yarn.server.api.records.MasterKey; import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class BaseNMTokenSecretManager extends public class BaseNMTokenSecretManager extends
SecretManager<NMTokenIdentifier> { SecretManager<NMTokenIdentifier> {
private static Log LOG = LogFactory private static final Logger LOG =
.getLog(BaseNMTokenSecretManager.class); LoggerFactory.getLogger(BaseNMTokenSecretManager.class);
protected int serialNo = new SecureRandom().nextInt(); protected int serialNo = new SecureRandom().nextInt();
@@ -71,12 +71,9 @@ public class BaseNMTokenSecretManager extends
@Override @Override
protected byte[] createPassword(NMTokenIdentifier identifier) { protected byte[] createPassword(NMTokenIdentifier identifier) {
if (LOG.isDebugEnabled()) { LOG.debug("creating password for {} for user {} to run on NM {}",
LOG.debug("creating password for " identifier.getApplicationAttemptId(),
+ identifier.getApplicationAttemptId() + " for user " identifier.getApplicationSubmitter(), identifier.getNodeId());
+ identifier.getApplicationSubmitter() + " to run on NM "
+ identifier.getNodeId());
}
readLock.lock(); readLock.lock();
try { try {
return createPassword(identifier.getBytes(), return createPassword(identifier.getBytes(),
@@ -99,12 +96,9 @@ public class BaseNMTokenSecretManager extends
protected byte[] retrivePasswordInternal(NMTokenIdentifier identifier, protected byte[] retrivePasswordInternal(NMTokenIdentifier identifier,
MasterKeyData masterKey) { MasterKeyData masterKey) {
if (LOG.isDebugEnabled()) { LOG.debug("retriving password for {} for user {} to run on NM {}",
LOG.debug("creating password for " identifier.getApplicationAttemptId(),
+ identifier.getApplicationAttemptId() + " for user " identifier.getApplicationSubmitter(), identifier.getNodeId());
+ identifier.getApplicationSubmitter() + " to run on NM "
+ identifier.getNodeId());
}
return createPassword(identifier.getBytes(), masterKey.getSecretKey()); return createPassword(identifier.getBytes(), masterKey.getSecretKey());
} }
/** /**

View File

@@ -18,13 +18,13 @@
package org.apache.hadoop.yarn.server.sharedcache; package org.apache.hadoop.yarn.server.sharedcache;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* A utility class that contains helper methods for dealing with the internal * A utility class that contains helper methods for dealing with the internal
@@ -34,7 +34,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
@Unstable @Unstable
public class SharedCacheUtil { public class SharedCacheUtil {
private static final Log LOG = LogFactory.getLog(SharedCacheUtil.class); private static final Logger LOG =
LoggerFactory.getLogger(SharedCacheUtil.class);
@Private @Private
public static int getCacheDepth(Configuration conf) { public static int getCacheDepth(Configuration conf) {
@@ -44,9 +45,8 @@ public class SharedCacheUtil {
if (cacheDepth <= 0) { if (cacheDepth <= 0) {
LOG.warn("Specified cache depth was less than or equal to zero." LOG.warn("Specified cache depth was less than or equal to zero."
+ " Using default value instead. Default: " + " Using default value instead. Default: {}, Specified: {}",
+ YarnConfiguration.DEFAULT_SHARED_CACHE_NESTED_LEVEL YarnConfiguration.DEFAULT_SHARED_CACHE_NESTED_LEVEL, cacheDepth);
+ ", Specified: " + cacheDepth);
cacheDepth = YarnConfiguration.DEFAULT_SHARED_CACHE_NESTED_LEVEL; cacheDepth = YarnConfiguration.DEFAULT_SHARED_CACHE_NESTED_LEVEL;
} }

View File

@@ -26,8 +26,6 @@ import java.util.Collection;
import java.util.List; import java.util.List;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol; import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
@@ -46,10 +44,13 @@ import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TBODY;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock; import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock; import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject; import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AppAttemptBlock extends HtmlBlock { public class AppAttemptBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppAttemptBlock.class); private static final Logger LOG =
LoggerFactory.getLogger(AppAttemptBlock.class);
protected ApplicationBaseProtocol appBaseProt; protected ApplicationBaseProtocol appBaseProt;
protected ApplicationAttemptId appAttemptId = null; protected ApplicationAttemptId appAttemptId = null;

View File

@@ -29,8 +29,6 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
@@ -67,10 +65,12 @@ import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock; import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject; import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AppBlock extends HtmlBlock { public class AppBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppBlock.class); private static final Logger LOG = LoggerFactory.getLogger(AppBlock.class);
protected ApplicationBaseProtocol appBaseProt; protected ApplicationBaseProtocol appBaseProt;
protected Configuration conf; protected Configuration conf;
protected ApplicationId appID = null; protected ApplicationId appID = null;

View File

@@ -34,8 +34,6 @@ import java.util.List;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.math.LongRange; import org.apache.commons.lang.math.LongRange;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol; import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
@@ -51,10 +49,12 @@ import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TBODY;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock; import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import com.google.inject.Inject; import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AppsBlock extends HtmlBlock { public class AppsBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppsBlock.class); private static final Logger LOG = LoggerFactory.getLogger(AppsBlock.class);
protected ApplicationBaseProtocol appBaseProt; protected ApplicationBaseProtocol appBaseProt;
protected EnumSet<YarnApplicationState> reqAppStates; protected EnumSet<YarnApplicationState> reqAppStates;
protected UserGroupInformation callerUGI; protected UserGroupInformation callerUGI;

View File

@@ -23,8 +23,6 @@ import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import java.io.IOException; import java.io.IOException;
import java.security.PrivilegedExceptionAction; import java.security.PrivilegedExceptionAction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol; import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
@@ -38,10 +36,13 @@ import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock; import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject; import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ContainerBlock extends HtmlBlock { public class ContainerBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(ContainerBlock.class); private static final Logger LOG =
LoggerFactory.getLogger(ContainerBlock.class);
protected ApplicationBaseProtocol appBaseProt; protected ApplicationBaseProtocol appBaseProt;
@Inject @Inject

View File

@@ -19,11 +19,9 @@
package org.apache.hadoop.yarn.server.webapp; package org.apache.hadoop.yarn.server.webapp;
import com.google.inject.Inject; import com.google.inject.Inject;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.GenericsUtil;
import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.security.AdminACLsManager; import org.apache.hadoop.yarn.security.AdminACLsManager;
import org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender; import org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender;
@@ -60,8 +58,6 @@ public class ErrorsAndWarningsBlock extends HtmlBlock {
@Override @Override
protected void render(Block html) { protected void render(Block html) {
Log log = LogFactory.getLog(ErrorsAndWarningsBlock.class);
boolean isAdmin = false; boolean isAdmin = false;
UserGroupInformation callerUGI = this.getCallerUGI(); UserGroupInformation callerUGI = this.getCallerUGI();
@@ -78,7 +74,7 @@ public class ErrorsAndWarningsBlock extends HtmlBlock {
return; return;
} }
if (log instanceof Log4JLogger) { if (GenericsUtil.isLog4jLogger(ErrorsAndWarningsBlock.class)) {
html.__(ErrorMetrics.class); html.__(ErrorMetrics.class);
html.__(WarningMetrics.class); html.__(WarningMetrics.class);
html.div().button().$onclick("reloadPage()").b("View data for the last ") html.div().button().$onclick("reloadPage()").b("View data for the last ")
@@ -180,8 +176,7 @@ public class ErrorsAndWarningsBlock extends HtmlBlock {
cutoffs.add((now - 43200 * 1000) / 1000); cutoffs.add((now - 43200 * 1000) / 1000);
cutoffs.add((now - 84600 * 1000) / 1000); cutoffs.add((now - 84600 * 1000) / 1000);
Log log = LogFactory.getLog(ErrorsAndWarningsBlock.class); if (GenericsUtil.isLog4jLogger(ErrorsAndWarningsBlock.class)) {
if (log instanceof Log4JLogger) {
appender = appender =
Log4jWarningErrorMetricsAppender.findAppender(); Log4jWarningErrorMetricsAppender.findAppender();
} }
@@ -193,8 +188,7 @@ public class ErrorsAndWarningsBlock extends HtmlBlock {
@Override @Override
protected void render(Block html) { protected void render(Block html) {
Log log = LogFactory.getLog(ErrorsAndWarningsBlock.class); if (GenericsUtil.isLog4jLogger(ErrorsAndWarningsBlock.class)) {
if (log instanceof Log4JLogger) {
Hamlet.DIV<Hamlet> div = Hamlet.DIV<Hamlet> div =
html.div().$class("metrics").$style("padding-bottom: 20px"); html.div().$class("metrics").$style("padding-bottom: 20px");
div.h3(tableHeading).table("#metricsoverview").thead() div.h3(tableHeading).table("#metricsoverview").thead()