MAPREDUCE-6997. Moving logging APIs over to slf4j in hadoop-mapreduce-client-hs. Contributed by Gergely Novák.

Author: Akira Ajisaka, 2017-11-08 19:21:43 +09:00
Commit: ffee10b68e (parent: 410d0319cf)
16 changed files with 74 additions and 65 deletions
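
The change is mechanical and repeats across all sixteen files: each commons-logging Log/LogFactory pair becomes an slf4j Logger/LoggerFactory pair. A minimal sketch of the before/after pattern (the class name below is hypothetical, not one of the changed files):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SomeHistoryService {
  // Before (Apache commons-logging):
  //   private static final Log LOG =
  //       LogFactory.getLog(SomeHistoryService.class);
  // After (slf4j):
  private static final Logger LOG =
      LoggerFactory.getLogger(SomeHistoryService.class);
}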

CachedHistoryStorage.java

@@ -30,8 +30,6 @@ import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.common.cache.Weigher;
 import com.google.common.util.concurrent.UncheckedExecutionException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
@@ -45,13 +43,16 @@ import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Manages an in memory cache of parsed Job History files.
  */
 public class CachedHistoryStorage extends AbstractService implements
     HistoryStorage {
-  private static final Log LOG = LogFactory.getLog(CachedHistoryStorage.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(CachedHistoryStorage.class);

   private LoadingCache<JobId, Job> loadedJobCache = null;
   private int loadedJobCacheSize;
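
Beyond the declaration swap shown above, the slf4j API removes the need for guards around expensive log messages. A small illustrative sketch (the class and message below are hypothetical, not part of this change):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggingStyleDemo {
  private static final Logger LOG =
      LoggerFactory.getLogger(LoggingStyleDemo.class);

  void lookup(Object jobId) {
    // commons-logging style needed a guard to avoid paying for the
    // string concatenation when DEBUG was off:
    //   if (LOG.isDebugEnabled()) { LOG.debug("Looking for Job " + jobId); }
    // slf4j's {} placeholder defers formatting until DEBUG is enabled:
    LOG.debug("Looking for Job {}", jobId);
  }
}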

CompletedJob.java

@@ -31,8 +31,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobACLsManager;
@@ -63,6 +61,8 @@ import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.util.Records;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
@@ -75,7 +75,7 @@ public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job
   // so we count it as 0
   private static final int UNDEFINED_VALUE = -1;
-  static final Log LOG = LogFactory.getLog(CompletedJob.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CompletedJob.class);
   private final Configuration conf;
   private final JobId jobId; //Can be picked from JobInfo with a conversion.
   private final String user; //Can be picked up from JobInfo

HSAuditLogger.java

@@ -20,14 +20,15 @@ package org.apache.hadoop.mapreduce.v2.hs;
 import java.net.InetAddress;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.ipc.Server;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 @Private
 public class HSAuditLogger {
-  private static final Log LOG = LogFactory.getLog(HSAuditLogger.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HSAuditLogger.class);

   enum Keys {
     USER, OPERATION, TARGET, RESULT, IP, PERMISSIONS, DESCRIPTION

HSProxies.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.v2.hs;
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
@@ -38,11 +36,13 @@ import org.apache.hadoop.security.protocolPB.RefreshUserMappingsProtocolPB;
 import org.apache.hadoop.tools.GetUserMappingsProtocol;
 import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
 import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolPB;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 @Private
 public class HSProxies {

-  private static final Log LOG = LogFactory.getLog(HSProxies.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HSProxies.class);

   @SuppressWarnings("unchecked")
   public static <T> T createProxy(Configuration conf, InetSocketAddress hsaddr,

HistoryClientService.java

@@ -26,8 +26,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -90,6 +88,8 @@ import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;

 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * This module is responsible for talking to the
@@ -98,7 +98,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class HistoryClientService extends AbstractService {

-  private static final Log LOG = LogFactory.getLog(HistoryClientService.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HistoryClientService.class);

   private HSClientProtocol protocolHandler;
   private Server server;

HistoryFileManager.java

@@ -40,8 +40,6 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -76,6 +74,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.SystemClock;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * This class provides a way to interact with history files in a thread safe
@@ -84,8 +84,10 @@ import org.apache.hadoop.yarn.util.SystemClock;
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class HistoryFileManager extends AbstractService {
-  private static final Log LOG = LogFactory.getLog(HistoryFileManager.class);
-  private static final Log SUMMARY_LOG = LogFactory.getLog(JobSummary.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HistoryFileManager.class);
+  private static final Logger SUMMARY_LOG =
+      LoggerFactory.getLogger(JobSummary.class);

   private enum HistoryInfoState {
     IN_INTERMEDIATE, IN_DONE, DELETED, MOVE_FAILED
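
Note that SUMMARY_LOG stays keyed to JobSummary.class rather than the enclosing class, so the job-summary output keeps its own logger name and can still be routed to a dedicated appender in the logging configuration after the API switch.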

HistoryServerFileSystemStateStoreService.java

@@ -27,8 +27,6 @@ import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -44,6 +42,8 @@ import org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
 import org.apache.hadoop.util.Shell;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 @Private
 @Unstable
@@ -54,8 +54,8 @@ import org.apache.hadoop.util.Shell;
 public class HistoryServerFileSystemStateStoreService
     extends HistoryServerStateStoreService {

-  public static final Log LOG =
-      LogFactory.getLog(HistoryServerFileSystemStateStoreService.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(HistoryServerFileSystemStateStoreService.class);

   private static final String ROOT_STATE_DIR_NAME = "HistoryServerState";
   private static final String TOKEN_STATE_DIR_NAME = "tokens";
@@ -192,7 +192,7 @@ public class HistoryServerFileSystemStateStoreService
       dataStream.close();
       dataStream = null;
     } finally {
-      IOUtils.cleanup(LOG, dataStream);
+      IOUtils.cleanupWithLogger(LOG, dataStream);
     }

     createNewFile(keyPath, memStream.toByteArray());
@@ -265,7 +265,7 @@ public class HistoryServerFileSystemStateStoreService
         out.close();
         out = null;
       } finally {
-        IOUtils.cleanup(LOG, out);
+        IOUtils.cleanupWithLogger(LOG, out);
       }
     } catch (IOException e) {
       fs.delete(file, false);
@@ -279,7 +279,7 @@ public class HistoryServerFileSystemStateStoreService
     try {
       in.readFully(data);
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
     return data;
   }
@@ -306,7 +306,7 @@ public class HistoryServerFileSystemStateStoreService
       dataStream.close();
       dataStream = null;
     } finally {
-      IOUtils.cleanup(LOG, dataStream);
+      IOUtils.cleanupWithLogger(LOG, dataStream);
     }
     return memStream.toByteArray();
   }
@@ -320,7 +320,7 @@ public class HistoryServerFileSystemStateStoreService
     try {
       key.readFields(in);
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
     state.tokenMasterKeyState.add(key);
   }
@@ -349,7 +349,7 @@ public class HistoryServerFileSystemStateStoreService
       tokenId.readFields(in);
       renewDate = in.readLong();
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
     state.tokenState.put(tokenId, renewDate);
     return tokenId;
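
The cleanup-to-cleanupWithLogger substitutions in this file follow directly from the logger type change: IOUtils.cleanup takes a commons-logging Log, while IOUtils.cleanupWithLogger takes an slf4j Logger. A minimal sketch of the call (the wrapper class below is hypothetical):

import java.io.Closeable;

import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class CleanupDemo {
  private static final Logger LOG =
      LoggerFactory.getLogger(CleanupDemo.class);

  void closeQuietly(Closeable stream) {
    // Closes each argument, logging (but not rethrowing) any IOException;
    // accepts the slf4j Logger that these classes now declare.
    IOUtils.cleanupWithLogger(LOG, stream);
  }
}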

HistoryServerLeveldbStateStoreService.java

@@ -29,8 +29,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -47,8 +45,9 @@ import org.fusesource.leveldbjni.JniDBFactory;
 import org.fusesource.leveldbjni.internal.NativeDB;
 import org.iq80.leveldb.DB;
 import org.iq80.leveldb.DBException;
-import org.iq80.leveldb.Logger;
 import org.iq80.leveldb.Options;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class HistoryServerLeveldbStateStoreService extends
     HistoryServerStateStoreService {
@@ -63,8 +62,8 @@ public class HistoryServerLeveldbStateStoreService extends
   private DB db;

-  public static final Log LOG =
-      LogFactory.getLog(HistoryServerLeveldbStateStoreService.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(HistoryServerLeveldbStateStoreService.class);

   @Override
   protected void initStorage(Configuration conf) throws IOException {
@@ -158,7 +157,7 @@ public class HistoryServerLeveldbStateStoreService extends
     try {
       key.readFields(in);
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
     state.tokenMasterKeyState.add(key);
   }
@@ -204,7 +203,7 @@ public class HistoryServerLeveldbStateStoreService extends
       tokenId.readFields(in);
       renewDate = in.readLong();
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
     state.tokenState.put(tokenId, renewDate);
   }
@@ -224,7 +223,7 @@ public class HistoryServerLeveldbStateStoreService extends
       dataStream.close();
       dataStream = null;
     } finally {
-      IOUtils.cleanup(LOG, dataStream);
+      IOUtils.cleanupWithLogger(LOG, dataStream);
     }

     String dbKey = getTokenDatabaseKey(tokenId);
@@ -270,7 +269,7 @@ public class HistoryServerLeveldbStateStoreService extends
       dataStream.close();
       dataStream = null;
     } finally {
-      IOUtils.cleanup(LOG, dataStream);
+      IOUtils.cleanupWithLogger(LOG, dataStream);
     }

     String dbKey = getTokenMasterKeyDatabaseKey(masterKey);
@@ -368,8 +367,9 @@ public class HistoryServerLeveldbStateStoreService extends
     }
   }

-  private static class LeveldbLogger implements Logger {
-    private static final Log LOG = LogFactory.getLog(LeveldbLogger.class);
+  private static class LeveldbLogger implements org.iq80.leveldb.Logger {
+    private static final Logger LOG =
+        LoggerFactory.getLogger(LeveldbLogger.class);

     @Override
     public void log(String message) {
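
Note the fully qualified implements org.iq80.leveldb.Logger in the last hunk: once org.slf4j.Logger is imported under its simple name, leveldb's Logger interface of the same name has to be spelled out to avoid the clash, and the inner class simply bridges leveldb's log callback onto the slf4j logger.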

JHSDelegationTokenSecretManager.java

@@ -21,14 +21,14 @@ package org.apache.hadoop.mapreduce.v2.hs;
 import java.io.IOException;
 import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryServerStateStoreService.HistoryServerState;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * A MapReduce specific delegation token secret manager.
@@ -40,7 +40,7 @@ import org.apache.hadoop.security.token.delegation.DelegationKey;
 public class JHSDelegationTokenSecretManager
     extends AbstractDelegationTokenSecretManager<MRDelegationTokenIdentifier> {

-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       JHSDelegationTokenSecretManager.class);
   private HistoryServerStateStoreService store;

JobHistory.java

@@ -27,8 +27,6 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -56,12 +54,14 @@ import org.apache.hadoop.yarn.util.Clock;

 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Loads and manages the Job history cache.
  */
 public class JobHistory extends AbstractService implements HistoryContext {
-  private static final Log LOG = LogFactory.getLog(JobHistory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(JobHistory.class);

   public static final Pattern CONF_FILENAME_REGEX = Pattern.compile("("
       + JobID.JOBID_REGEX + ")_conf.xml(?:\\.[0-9]+\\.old)?");

JobHistoryServer.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.v2.hs;
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
@@ -47,6 +45,8 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService;

 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /******************************************************************
  * {@link JobHistoryServer} is responsible for servicing all job history
@@ -62,7 +62,8 @@ public class JobHistoryServer extends CompositeService {
   public static final long historyServerTimeStamp = System.currentTimeMillis();

-  private static final Log LOG = LogFactory.getLog(JobHistoryServer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JobHistoryServer.class);
   protected HistoryContext historyContext;
   private HistoryClientService clientService;
   private JobHistory jobHistoryService;
@@ -225,7 +226,7 @@ public class JobHistoryServer extends CompositeService {
       jobHistoryServer.init(conf);
       jobHistoryServer.start();
     } catch (Throwable t) {
-      LOG.fatal("Error starting JobHistoryServer", t);
+      LOG.error("Error starting JobHistoryServer", t);
       ExitUtil.terminate(-1, "Error starting JobHistoryServer");
     }
     return jobHistoryServer;
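
The LOG.fatal call becomes LOG.error here because slf4j deliberately has no FATAL level; the conventional mapping is ERROR followed by process termination, which this code already performs via ExitUtil.terminate.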

PartialJob.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.v2.hs;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
@@ -41,10 +39,12 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class PartialJob implements org.apache.hadoop.mapreduce.v2.app.job.Job {

-  private static final Log LOG = LogFactory.getLog(PartialJob.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PartialJob.class);

   private JobIndexInfo jobIndexInfo = null;
   private JobId jobId = null;

HSAdminServer.java

@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -54,11 +52,14 @@ import org.apache.hadoop.mapreduce.v2.hs.protocolPB.HSAdminRefreshProtocolServer

 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.BlockingService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 @Private
 public class HSAdminServer extends AbstractService implements HSAdminProtocol {

-  private static final Log LOG = LogFactory.getLog(HSAdminServer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HSAdminServer.class);

   private AccessControlList adminAcl;
   private AggregatedLogDeletionService aggLogDelService = null;

TestJobHistoryEvents.java

@@ -22,8 +22,6 @@ import java.util.Map;

 import org.junit.Assert;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
@@ -44,9 +42,12 @@ import org.apache.hadoop.service.Service;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class TestJobHistoryEvents {
-  private static final Log LOG = LogFactory.getLog(TestJobHistoryEvents.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestJobHistoryEvents.class);

   @Test
   public void testHistoryEvents() throws Exception {

TestJobHistoryParsing.java

@@ -38,8 +38,6 @@ import java.util.concurrent.atomic.AtomicInteger;

 import org.junit.Assert;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileContext;
@@ -96,9 +94,12 @@ import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class TestJobHistoryParsing {
-  private static final Log LOG = LogFactory.getLog(TestJobHistoryParsing.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestJobHistoryParsing.class);

   private static final String RACK_NAME = "/MyRackName";

TestHSWebApp.java

@@ -35,8 +35,6 @@ import java.io.PrintWriter;
 import java.util.HashMap;
 import java.util.Map;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -54,9 +52,11 @@ import org.junit.Test;

 import com.google.inject.AbstractModule;
 import com.google.inject.Injector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class TestHSWebApp {
-  private static final Log LOG = LogFactory.getLog(TestHSWebApp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestHSWebApp.class);

   @Test public void testAppControllerIndex() {
     MockAppContext ctx = new MockAppContext(0, 1, 1, 1);