YARN-6873. Moving logging APIs over to slf4j in hadoop-yarn-server-applicationhistoryservice. Contributed by Yeliang Cang.

Akira Ajisaka 2017-08-01 10:53:32 +09:00
parent ea568123fa
commit 1a78c0ff01
18 changed files with 111 additions and 107 deletions
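
The change is mechanical and repeated across all 18 files: the commons-logging Log/LogFactory pair is swapped for slf4j's Logger/LoggerFactory, LOG.fatal(...) becomes LOG.error(...) (slf4j defines no FATAL level), bare LOG.error(e) becomes LOG.error(e.toString()) (slf4j's error overloads take a String message first), and the deprecated IOUtils.cleanup(Log, ...) helper is replaced by IOUtils.cleanupWithLogger(Logger, ...). A minimal sketch of the logger substitution, with MyService as a placeholder class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Placeholder class illustrating the before/after pattern of this commit.
public class MyService {
  // Replaces: private static final Log LOG =
  //     LogFactory.getLog(MyService.class);
  private static final Logger LOG =
      LoggerFactory.getLogger(MyService.class);

  public void start() {
    // slf4j parameterized messages avoid string concatenation
    // when the level is disabled:
    LOG.info("{} started", MyService.class.getSimpleName());
  }
}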

ApplicationHistoryClientService.java

@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -61,11 +59,13 @@ import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.server.timeline.security.authorize.TimelinePolicyProvider;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ApplicationHistoryClientService extends AbstractService implements
     ApplicationHistoryProtocol {
-  private static final Log LOG = LogFactory
-      .getLog(ApplicationHistoryClientService.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ApplicationHistoryClientService.class);
   private ApplicationHistoryManager history;
   private Server server;
   private InetSocketAddress bindAddress;

ApplicationHistoryManagerImpl.java

@@ -23,8 +23,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.AbstractService;
@@ -42,11 +40,13 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.records.Container
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ApplicationHistoryManagerImpl extends AbstractService implements
     ApplicationHistoryManager {
-  private static final Log LOG = LogFactory
-      .getLog(ApplicationHistoryManagerImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ApplicationHistoryManagerImpl.class);
   private static final String UNAVAILABLE = "N/A";
 
   private ApplicationHistoryStore historyStore;

ApplicationHistoryManagerOnTimelineStore.java

@@ -28,8 +28,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -69,12 +67,14 @@ import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ApplicationHistoryManagerOnTimelineStore extends AbstractService
     implements
       ApplicationHistoryManager {
-  private static final Log LOG = LogFactory
-      .getLog(ApplicationHistoryManagerOnTimelineStore.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ApplicationHistoryManagerOnTimelineStore.class);
 
   @VisibleForTesting
   static final String UNAVAILABLE = "N/A";

ApplicationHistoryServer.java

@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
@@ -60,6 +58,8 @@ import org.eclipse.jetty.servlet.FilterHolder;
 import org.eclipse.jetty.webapp.WebAppContext;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * History server that keeps track of all types of history in the cluster.
@@ -68,8 +68,8 @@ import com.google.common.annotations.VisibleForTesting;
 public class ApplicationHistoryServer extends CompositeService {
 
   public static final int SHUTDOWN_HOOK_PRIORITY = 30;
-  private static final Log LOG = LogFactory
-      .getLog(ApplicationHistoryServer.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ApplicationHistoryServer.class);
 
   private ApplicationHistoryClientService ahsClientService;
   private ApplicationACLsManager aclsManager;
@@ -178,7 +178,7 @@ public class ApplicationHistoryServer extends CompositeService {
       appHistoryServer.init(conf);
      appHistoryServer.start();
    } catch (Throwable t) {
-      LOG.fatal("Error starting ApplicationHistoryServer", t);
+      LOG.error("Error starting ApplicationHistoryServer", t);
      ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer");
    }
    return appHistoryServer;
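
A note on the fatal-to-error change above: org.slf4j.Logger exposes only trace, debug, info, warn and error, so FATAL-level messages have no direct equivalent. A hedged sketch of the converted launcher pattern, with MyDaemon as a hypothetical stand-in:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MyDaemon {
  private static final Logger LOG = LoggerFactory.getLogger(MyDaemon.class);

  public static void main(String[] args) {
    try {
      // ... service init and start would go here ...
      throw new IllegalStateException("simulated startup failure");
    } catch (Throwable t) {
      // No LOG.fatal(...) in slf4j; error(String, Throwable) is the
      // highest available level and still records the stack trace.
      LOG.error("Error starting MyDaemon", t);
    }
  }
}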

FileSystemApplicationHistoryStore.java

@@ -30,8 +30,6 @@ import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -74,6 +72,8 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.C
 import org.apache.hadoop.yarn.util.ConverterUtils;
 
 import com.google.protobuf.InvalidProtocolBufferException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * File system implementation of {@link ApplicationHistoryStore}. In this
@@ -89,8 +89,8 @@ import com.google.protobuf.InvalidProtocolBufferException;
 public class FileSystemApplicationHistoryStore extends AbstractService
     implements ApplicationHistoryStore {
 
-  private static final Log LOG = LogFactory
-      .getLog(FileSystemApplicationHistoryStore.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(FileSystemApplicationHistoryStore.class);
 
  private static final String ROOT_DIR_NAME = "ApplicationHistoryDataRoot";
  private static final int MIN_BLOCK_SIZE = 256 * 1024;
@@ -141,7 +141,7 @@ public class FileSystemApplicationHistoryStore extends AbstractService
      }
      outstandingWriters.clear();
    } finally {
-      IOUtils.cleanup(LOG, fs);
+      IOUtils.cleanupWithLogger(LOG, fs);
    }
    super.serviceStop();
  }
@@ -711,12 +711,12 @@ public class FileSystemApplicationHistoryStore extends AbstractService
    }
 
    public void reset() throws IOException {
-      IOUtils.cleanup(LOG, scanner);
+      IOUtils.cleanupWithLogger(LOG, scanner);
      scanner = reader.createScanner();
    }
 
    public void close() {
-      IOUtils.cleanup(LOG, scanner, reader, fsdis);
+      IOUtils.cleanupWithLogger(LOG, scanner, reader, fsdis);
    }
  }
@@ -740,13 +740,13 @@ public class FileSystemApplicationHistoryStore extends AbstractService
            YarnConfiguration.DEFAULT_FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE), null,
            getConfig());
      } catch (IOException e) {
-        IOUtils.cleanup(LOG, fsdos);
+        IOUtils.cleanupWithLogger(LOG, fsdos);
        throw e;
      }
    }
 
    public synchronized void close() {
-      IOUtils.cleanup(LOG, writer, fsdos);
+      IOUtils.cleanupWithLogger(LOG, writer, fsdos);
    }
 
    public synchronized void writeHistoryData(HistoryDataKey key, byte[] value)
@@ -756,13 +756,13 @@ public class FileSystemApplicationHistoryStore extends AbstractService
      dos = writer.prepareAppendKey(-1);
      key.write(dos);
    } finally {
-      IOUtils.cleanup(LOG, dos);
+      IOUtils.cleanupWithLogger(LOG, dos);
    }
    try {
      dos = writer.prepareAppendValue(value.length);
      dos.write(value);
    } finally {
-      IOUtils.cleanup(LOG, dos);
+      IOUtils.cleanupWithLogger(LOG, dos);
    }
  }
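
The cleanup calls in this file switch from the commons-logging overload IOUtils.cleanup(Log, Closeable...) to IOUtils.cleanupWithLogger(Logger, Closeable...), which takes an slf4j Logger. Both close each argument and log, rather than rethrow, any close failure. A usage sketch under that assumption (the file path and method are illustrative):

import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CleanupSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(CleanupSketch.class);

  static void readHistory(String path) throws IOException {
    FileInputStream in = null;
    try {
      in = new FileInputStream(path);
      // ... read and parse history data ...
    } finally {
      // Closes every Closeable passed in; a failed close is logged
      // through LOG instead of being thrown.
      IOUtils.cleanupWithLogger(LOG, in);
    }
  }
}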

AHSWebServices.java

@@ -42,8 +42,6 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.core.StreamingOutput;
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.Response.Status;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -80,12 +78,15 @@ import com.google.inject.Inject;
 import com.google.inject.Singleton;
 
 import com.sun.jersey.api.client.ClientHandlerException;
 import com.sun.jersey.api.client.UniformInterfaceException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Singleton
 @Path("/ws/v1/applicationhistory")
 public class AHSWebServices extends WebServices {
-  private static final Log LOG = LogFactory.getLog(AHSWebServices.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(AHSWebServices.class);
   private static final String NM_DOWNLOAD_URI_STR =
       "/ws/v1/node/containers";
   private static final Joiner JOINER = Joiner.on("");

NavBlock.java

@@ -18,20 +18,20 @@
 
 package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender;
 import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class NavBlock extends HtmlBlock {
 
   @Override
   public void render(Block html) {
     boolean addErrorsAndWarningsLink = false;
-    Log log = LogFactory.getLog(NavBlock.class);
+    Logger log = LoggerFactory.getLogger(NavBlock.class);
     if (log instanceof Log4JLogger) {
       Log4jWarningErrorMetricsAppender appender =
           Log4jWarningErrorMetricsAppender.findAppender();

KeyValueBasedTimelineStore.java

@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.yarn.server.timeline;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.service.AbstractService;
@@ -33,6 +31,8 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEnt
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
 import org.apache.hadoop.yarn.server.timeline.TimelineDataManager.CheckAcl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -71,8 +71,8 @@ abstract class KeyValueBasedTimelineStore
 
   private boolean serviceStopped = false;
 
-  private static final Log LOG
-      = LogFactory.getLog(KeyValueBasedTimelineStore.class);
+  private static final Logger LOG
+      = LoggerFactory.getLogger(KeyValueBasedTimelineStore.class);
 
   public KeyValueBasedTimelineStore() {
     super(KeyValueBasedTimelineStore.class.getName());

LeveldbTimelineStore.java

@@ -22,8 +22,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import org.apache.commons.collections.map.LRUMap;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -48,6 +46,7 @@ import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser;
 import org.apache.hadoop.yarn.server.utils.LeveldbIterator;
 import org.fusesource.leveldbjni.JniDBFactory;
 import org.iq80.leveldb.*;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -118,8 +117,8 @@ import static org.fusesource.leveldbjni.JniDBFactory.bytes;
 @InterfaceStability.Unstable
 public class LeveldbTimelineStore extends AbstractService
     implements TimelineStore {
-  private static final Log LOG = LogFactory
-      .getLog(LeveldbTimelineStore.class);
+  private static final org.slf4j.Logger LOG = LoggerFactory
+      .getLogger(LeveldbTimelineStore.class);
 
  @Private
  @VisibleForTesting
@@ -240,7 +239,7 @@ public class LeveldbTimelineStore extends AbstractService
        localFS.setPermission(dbPath, LEVELDB_DIR_UMASK);
      }
    } finally {
-      IOUtils.cleanup(LOG, localFS);
+      IOUtils.cleanupWithLogger(LOG, localFS);
    }
    LOG.info("Using leveldb path " + dbPath);
    try {
@@ -284,7 +283,7 @@ public class LeveldbTimelineStore extends AbstractService
            " closing db now", e);
      }
    }
-    IOUtils.cleanup(LOG, db);
+    IOUtils.cleanupWithLogger(LOG, db);
    super.serviceStop();
  }
@@ -320,7 +319,7 @@ public class LeveldbTimelineStore extends AbstractService
          discardOldEntities(timestamp);
          Thread.sleep(ttlInterval);
        } catch (IOException e) {
-          LOG.error(e);
+          LOG.error(e.toString());
        } catch (InterruptedException e) {
          LOG.info("Deletion thread received interrupt, exiting");
          break;
@@ -394,7 +393,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
  }
@@ -570,7 +569,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
    return events;
  }
@@ -753,7 +752,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
  }
@@ -925,7 +924,7 @@ public class LeveldbTimelineStore extends AbstractService
    } finally {
      lock.unlock();
      writeLocks.returnLock(lock);
-      IOUtils.cleanup(LOG, writeBatch);
+      IOUtils.cleanupWithLogger(LOG, writeBatch);
    }
 
    for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) {
@@ -1376,7 +1375,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
  }
@@ -1506,7 +1505,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, writeBatch);
+      IOUtils.cleanupWithLogger(LOG, writeBatch);
    }
  }
@@ -1548,7 +1547,7 @@ public class LeveldbTimelineStore extends AbstractService
      LOG.error("Got IOException while deleting entities for type " +
          entityType + ", continuing to next type", e);
    } finally {
-      IOUtils.cleanup(LOG, iterator, pfIterator);
+      IOUtils.cleanupWithLogger(LOG, iterator, pfIterator);
      deleteLock.writeLock().unlock();
      if (typeCount > 0) {
        LOG.info("Deleted " + typeCount + " entities of type " +
@@ -1629,7 +1628,7 @@ public class LeveldbTimelineStore extends AbstractService
      String incompatibleMessage =
          "Incompatible version for timeline store: expecting version "
              + getCurrentVersion() + ", but loading version " + loadedVersion;
-      LOG.fatal(incompatibleMessage);
+      LOG.error(incompatibleMessage);
      throw new IOException(incompatibleMessage);
    }
  }
@@ -1718,7 +1717,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, writeBatch);
+      IOUtils.cleanupWithLogger(LOG, writeBatch);
    }
  }
@@ -1755,7 +1754,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
  }
@@ -1805,7 +1804,7 @@ public class LeveldbTimelineStore extends AbstractService
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
  }
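
The LOG.error(e) to LOG.error(e.toString()) conversions in the deletion thread above follow from an API difference: commons-logging accepts any Object (including a Throwable) as the message, while slf4j's error overloads take a String message first. A small sketch contrasting the two slf4j options (the message text is illustrative):

import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ErrorLoggingSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(ErrorLoggingSketch.class);

  static void handle(IOException e) {
    // What this commit does: log the exception's string form only.
    LOG.error(e.toString());
    // Alternative that also keeps the stack trace:
    LOG.error("Error discarding old timeline entities", e);
  }
}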

RollingLevelDB.java

@@ -33,8 +33,6 @@ import java.util.Map.Entry;
 
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.lang.time.FastDateFormat;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +43,8 @@ import org.fusesource.leveldbjni.JniDBFactory;
 import org.iq80.leveldb.DB;
 import org.iq80.leveldb.Options;
 import org.iq80.leveldb.WriteBatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Contains the logic to lookup a leveldb by timestamp so that multiple smaller
@@ -54,7 +54,8 @@ import org.iq80.leveldb.WriteBatch;
 class RollingLevelDB {
 
   /** Logger for this class. */
-  private static final Log LOG = LogFactory.getLog(RollingLevelDB.class);
+  private static final Logger LOG = LoggerFactory.
+      getLogger(RollingLevelDB.class);
   /** Factory to open and create new leveldb instances. */
   private static JniDBFactory factory = new JniDBFactory();
   /** Thread safe date formatter. */
@@ -151,7 +152,7 @@ class RollingLevelDB {
    }
 
    public void close() {
-      IOUtils.cleanup(LOG, writeBatch);
+      IOUtils.cleanupWithLogger(LOG, writeBatch);
    }
  }
@@ -346,7 +347,7 @@ class RollingLevelDB {
        .iterator();
    while (iterator.hasNext()) {
      Entry<Long, DB> entry = iterator.next();
-      IOUtils.cleanup(LOG, entry.getValue());
+      IOUtils.cleanupWithLogger(LOG, entry.getValue());
      String dbName = fdf.format(entry.getKey());
      Path path = new Path(rollingDBPath, getName() + "." + dbName);
      try {
@@ -361,9 +362,9 @@ class RollingLevelDB {
 
  public void stop() throws Exception {
    for (DB db : rollingdbs.values()) {
-      IOUtils.cleanup(LOG, db);
+      IOUtils.cleanupWithLogger(LOG, db);
    }
-    IOUtils.cleanup(LOG, lfs);
+    IOUtils.cleanupWithLogger(LOG, lfs);
  }
 
  private long computeNextCheckMillis(long now) {

RollingLevelDBTimelineStore.java

@@ -38,8 +38,6 @@ import java.util.TreeMap;
 
 import org.apache.commons.collections.map.LRUMap;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -76,6 +74,8 @@ import org.iq80.leveldb.ReadOptions;
 import org.iq80.leveldb.WriteBatch;
 import org.nustaq.serialization.FSTConfiguration;
 import org.nustaq.serialization.FSTClazzNameRegistry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
 
@@ -168,8 +168,8 @@ import static org.fusesource.leveldbjni.JniDBFactory.bytes;
 @InterfaceStability.Unstable
 public class RollingLevelDBTimelineStore extends AbstractService implements
     TimelineStore {
-  private static final Log LOG = LogFactory
-      .getLog(RollingLevelDBTimelineStore.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(RollingLevelDBTimelineStore.class);
   private static FSTConfiguration fstConf =
       FSTConfiguration.createDefaultConfiguration();
   // Fall back to 2.24 parsing if 2.50 parsing fails
@@ -368,9 +368,9 @@ public class RollingLevelDBTimelineStore extends AbstractService implements
            + " closing db now", e);
      }
    }
-    IOUtils.cleanup(LOG, domaindb);
-    IOUtils.cleanup(LOG, starttimedb);
-    IOUtils.cleanup(LOG, ownerdb);
+    IOUtils.cleanupWithLogger(LOG, domaindb);
+    IOUtils.cleanupWithLogger(LOG, starttimedb);
+    IOUtils.cleanupWithLogger(LOG, ownerdb);
    entitydb.stop();
    indexdb.stop();
    super.serviceStop();
@@ -399,7 +399,7 @@ public class RollingLevelDBTimelineStore extends AbstractService implements
          discardOldEntities(timestamp);
          Thread.sleep(ttlInterval);
        } catch (IOException e) {
-          LOG.error(e);
+          LOG.error(e.toString());
        } catch (InterruptedException e) {
          LOG.info("Deletion thread received interrupt, exiting");
          break;
@@ -1525,7 +1525,7 @@ public class RollingLevelDBTimelineStore extends AbstractService implements
              + ". Total start times deleted so far this cycle: "
              + startTimesCount);
        }
-        IOUtils.cleanup(LOG, writeBatch);
+        IOUtils.cleanupWithLogger(LOG, writeBatch);
        writeBatch = starttimedb.createWriteBatch();
        batchSize = 0;
      }
@@ -1545,7 +1545,7 @@ public class RollingLevelDBTimelineStore extends AbstractService implements
      LOG.info("Deleted " + startTimesCount + "/" + totalCount
          + " start time entities earlier than " + minStartTime);
    } finally {
-      IOUtils.cleanup(LOG, writeBatch);
+      IOUtils.cleanupWithLogger(LOG, writeBatch);
    }
    return startTimesCount;
  }
@@ -1622,7 +1622,7 @@ public class RollingLevelDBTimelineStore extends AbstractService implements
      String incompatibleMessage = "Incompatible version for timeline store: "
          + "expecting version " + getCurrentVersion()
          + ", but loading version " + loadedVersion;
-      LOG.fatal(incompatibleMessage);
+      LOG.error(incompatibleMessage);
      throw new IOException(incompatibleMessage);
    }
  }

TimelineDataManager.java

@@ -26,8 +26,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.SortedSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.service.AbstractService;
@@ -45,6 +43,8 @@ import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The class wrap over the timeline store and the ACLs manager. It does some non
@@ -54,7 +54,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 public class TimelineDataManager extends AbstractService {
 
-  private static final Log LOG = LogFactory.getLog(TimelineDataManager.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TimelineDataManager.class);
 
   @VisibleForTesting
   public static final String DEFAULT_DOMAIN_ID = "DEFAULT";

LeveldbTimelineStateStore.java

@@ -28,8 +28,6 @@ import java.io.File;
 import java.io.IOException;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -50,6 +48,8 @@ import org.iq80.leveldb.DB;
 import org.iq80.leveldb.DBException;
 import org.iq80.leveldb.Options;
 import org.iq80.leveldb.WriteBatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.fusesource.leveldbjni.JniDBFactory.bytes;
 
@@ -60,8 +60,8 @@ import static org.fusesource.leveldbjni.JniDBFactory.bytes;
 public class LeveldbTimelineStateStore extends
     TimelineStateStore {
 
-  public static final Log LOG =
-      LogFactory.getLog(LeveldbTimelineStateStore.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(LeveldbTimelineStateStore.class);
 
  private static final String DB_NAME = "timeline-state-store.ldb";
  private static final FsPermission LEVELDB_DIR_UMASK = FsPermission
@@ -103,7 +103,7 @@ public class LeveldbTimelineStateStore extends
        localFS.setPermission(dbPath, LEVELDB_DIR_UMASK);
      }
    } finally {
-      IOUtils.cleanup(LOG, localFS);
+      IOUtils.cleanupWithLogger(LOG, localFS);
    }
    JniDBFactory factory = new JniDBFactory();
    try {
@@ -131,7 +131,7 @@ public class LeveldbTimelineStateStore extends
 
  @Override
  protected void closeStorage() throws IOException {
-    IOUtils.cleanup(LOG, db);
+    IOUtils.cleanupWithLogger(LOG, db);
  }
 
  @Override
@@ -168,8 +168,8 @@ public class LeveldbTimelineStateStore extends
    } catch (DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, ds);
-      IOUtils.cleanup(LOG, batch);
+      IOUtils.cleanupWithLogger(LOG, ds);
+      IOUtils.cleanupWithLogger(LOG, batch);
    }
  }
@@ -239,7 +239,7 @@ public class LeveldbTimelineStateStore extends
      key.write(dataStream);
      dataStream.close();
    } finally {
-      IOUtils.cleanup(LOG, dataStream);
+      IOUtils.cleanupWithLogger(LOG, dataStream);
    }
    return memStream.toByteArray();
  }
@@ -253,7 +253,7 @@ public class LeveldbTimelineStateStore extends
    try {
      key.readFields(in);
    } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
    }
    state.tokenMasterKeyState.add(key);
  }
@@ -267,7 +267,7 @@ public class LeveldbTimelineStateStore extends
    try {
      data.readFields(in);
    } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
    }
    state.tokenState.put(data.getTokenIdentifier(), data.getRenewDate());
  }
@@ -290,7 +290,7 @@ public class LeveldbTimelineStateStore extends
        ++numKeys;
      }
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
    return numKeys;
  }
@@ -314,7 +314,7 @@ public class LeveldbTimelineStateStore extends
    } catch (DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(LOG, iterator);
+      IOUtils.cleanupWithLogger(LOG, iterator);
    }
    return numTokens;
  }
@@ -332,7 +332,7 @@ public class LeveldbTimelineStateStore extends
      try {
        state.latestSequenceNumber = in.readInt();
      } finally {
-        IOUtils.cleanup(LOG, in);
+        IOUtils.cleanupWithLogger(LOG, in);
      }
    }
  }
@@ -412,7 +412,7 @@ public class LeveldbTimelineStateStore extends
      String incompatibleMessage =
          "Incompatible version for timeline state store: expecting version "
              + getCurrentVersion() + ", but loading version " + loadedVersion;
-      LOG.fatal(incompatibleMessage);
+      LOG.error(incompatibleMessage);
      throw new IOException(incompatibleMessage);
    }
  }

TimelineACLsManager.java

@@ -24,8 +24,6 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.commons.collections.map.LRUMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -41,6 +39,8 @@ import org.apache.hadoop.yarn.server.timeline.TimelineStore;
 import org.apache.hadoop.yarn.util.StringHelper;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <code>TimelineACLsManager</code> check the entity level timeline data access.
@@ -48,7 +48,8 @@ import com.google.common.annotations.VisibleForTesting;
 @Private
 public class TimelineACLsManager {
 
-  private static final Log LOG = LogFactory.getLog(TimelineACLsManager.class);
+  private static final Logger LOG = LoggerFactory.
+      getLogger(TimelineACLsManager.class);
   private static final int DOMAIN_ACCESS_ENTRY_CACHE_SIZE = 100;
 
   private AdminACLsManager adminAclsManager;

TimelineDelegationTokenSecretManagerService.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.yarn.server.timeline.security;
 import java.io.IOException;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +33,8 @@ import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
 import org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore;
 import org.apache.hadoop.yarn.server.timeline.recovery.TimelineStateStore;
 import org.apache.hadoop.yarn.server.timeline.recovery.TimelineStateStore.TimelineServiceState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The service wrapper of {@link TimelineDelegationTokenSecretManager}
@@ -118,8 +118,8 @@ public class TimelineDelegationTokenSecretManagerService extends
   public static class TimelineDelegationTokenSecretManager extends
       AbstractDelegationTokenSecretManager<TimelineDelegationTokenIdentifier> {
 
-    public static final Log LOG =
-        LogFactory.getLog(TimelineDelegationTokenSecretManager.class);
+    public static final Logger LOG =
+        LoggerFactory.getLogger(TimelineDelegationTokenSecretManager.class);
 
     private TimelineStateStore stateStore;

TimelineWebServices.java

@@ -43,8 +43,6 @@ import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
@@ -68,13 +66,16 @@ import org.apache.hadoop.yarn.webapp.NotFoundException;
 
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Singleton
 @Path("/ws/v1/timeline")
 //TODO: support XML serialization/deserialization
 public class TimelineWebServices {
 
-  private static final Log LOG = LogFactory.getLog(TimelineWebServices.class);
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TimelineWebServices.class);
 
   private TimelineDataManager timelineDataManager;

TestFileSystemApplicationHistoryStore.java

@@ -32,8 +32,6 @@ import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,12 +49,14 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestFileSystemApplicationHistoryStore extends
     ApplicationHistoryStoreTestUtils {
 
-  private static Log LOG = LogFactory
-      .getLog(TestFileSystemApplicationHistoryStore.class.getName());
+  private static final Logger LOG = LoggerFactory
+      .getLogger(TestFileSystemApplicationHistoryStore.class.getName());
 
   private FileSystem fs;
   private Path fsWorkingPath;

TestLeveldbTimelineStore.java

@@ -160,7 +160,7 @@ public class TestLeveldbTimelineStore extends TimelineStoreTestUtils {
    } catch(DBException e) {
      throw new IOException(e);
    } finally {
-      IOUtils.cleanup(null, iterator, pfIterator);
+      IOUtils.cleanupWithLogger(null, iterator, pfIterator);
    }
  }