YARN-7141. Move logging APIs to slf4j in timelineservice after ATSv2 merge. Contributed by Varun Saxena
(cherry picked from commit dcd0bedcc8)
Conflicts:
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMHATimelineCollectors.java
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/security/TimelineReaderWhitelistAuthorizationFilter.java
commit f7637b55bb
parent 1289b056ac
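The change applies one pattern throughout the timelineservice modules: drop the commons-logging Log/LogFactory pair in favour of slf4j's Logger/LoggerFactory. A minimal sketch of the declaration swap, using a hypothetical ExampleService class rather than any class touched by this commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleService {
  // Before: private static final Log LOG = LogFactory.getLog(ExampleService.class);
  private static final Logger LOG =
      LoggerFactory.getLogger(ExampleService.class);
}

Loggers that are no longer referenced at all, such as the one in TestRMHATimelineCollectors below, are removed outright rather than converted.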
@@ -26,8 +26,6 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineW
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
 import org.junit.Before;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 import java.util.ArrayList;
 import java.util.List;
@@ -40,8 +38,6 @@ import static org.junit.Assert.assertEquals;
  * transition.
  */
 public class TestRMHATimelineCollectors extends RMHATestBase {
-  public static final Logger LOG = LoggerFactory
-      .getLogger(TestRMHATimelineCollectors.class);

   @Before
   @Override
@@ -86,9 +86,7 @@ public final class EntityTypeReader extends AbstractTimelineStorageReader {
     typeFilterList.addFilter(new FirstKeyOnlyFilter());
     typeFilterList.addFilter(new KeyOnlyFilter());
     typeFilterList.addFilter(new PageFilter(1));
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("FilterList created for scan is - " + typeFilterList);
-    }
+    LOG.debug("FilterList created for scan is - {}", typeFilterList);

     int counter = 0;
     while (true) {
@@ -112,10 +110,7 @@ public final class EntityTypeReader extends AbstractTimelineStorageReader {
         currRowKey = getNextRowKey(prefix.getRowKeyPrefix(), currType);
       }
     }
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Scanned " + counter + "records for "
-          + types.size() + "types");
-    }
+    LOG.debug("Scanned {} records for {} types", counter, types.size());
     return types;
   }

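With slf4j, the explicit isDebugEnabled() guard around cheap arguments becomes unnecessary: the {} placeholders are only substituted when debug logging is actually enabled, so no string concatenation happens when it is off. A small sketch of the before/after styles, with illustrative variable names rather than code from this commit:

// Before: the guard avoids building the message eagerly.
if (LOG.isDebugEnabled()) {
  LOG.debug("Scanned " + counter + " records for " + types.size() + " types");
}

// After: slf4j formats the message only if debug logging is on.
LOG.debug("Scanned {} records for {} types", counter, types.size());

A guard is still worthwhile when computing an argument is itself expensive, for example serializing a large object purely for the log line.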
@@ -19,8 +19,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.subapplication;

 import java.io.IOException;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * The sub application table has column families:
@@ -103,7 +103,7 @@ public class SubApplicationTable extends BaseTable<SubApplicationTable> {
   /** default max number of versions. */
   private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000;

-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
       SubApplicationTable.class);

   public SubApplicationTable() {
@@ -112,9 +112,7 @@ public class AppLevelTimelineCollectorWithAgg
   private class AppLevelAggregator implements Runnable {

     private void aggregate() {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("App-level real-time aggregating");
-      }
+      LOG.debug("App-level real-time aggregating");
       if (!isReadyToAggregate()) {
         LOG.warn("App-level collector is not ready, skip aggregation. ");
         return;
@@ -137,9 +135,7 @@ public class AppLevelTimelineCollectorWithAgg
       } catch (Exception e) {
         LOG.error("Error aggregating timeline metrics", e);
       }
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("App-level real-time aggregation complete");
-      }
+      LOG.debug("App-level real-time aggregation complete");
     }

     @Override
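Calls such as LOG.error("Error aggregating timeline metrics", e) carry over unchanged: slf4j, like commons-logging, treats a trailing Throwable as the exception whose stack trace should be logged, and this also combines with placeholders. An illustrative sketch (the aggregate() call and appId variable are assumptions, not code from this commit):

try {
  aggregate();
} catch (Exception e) {
  // slf4j fills in the {} placeholder and logs the stack trace of 'e'.
  LOG.error("Error aggregating timeline metrics for {}", appId, e);
}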
@@ -20,8 +20,6 @@ package org.apache.hadoop.yarn.server.timelineservice.security;

 import java.io.IOException;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.io.Text;
@@ -30,6 +28,8 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
 import org.apache.hadoop.yarn.server.timeline.security.TimelineDelgationTokenSecretManagerService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * The service wrapper of {@link TimelineV2DelegationTokenSecretManager}.
@@ -75,8 +75,8 @@ public class TimelineV2DelegationTokenSecretManagerService extends
   public static class TimelineV2DelegationTokenSecretManager extends
       AbstractDelegationTokenSecretManager<TimelineDelegationTokenIdentifier> {

-    private static final Log LOG =
-        LogFactory.getLog(TimelineV2DelegationTokenSecretManager.class);
+    private static final Logger LOG =
+        LoggerFactory.getLogger(TimelineV2DelegationTokenSecretManager.class);

     /**
      * Create a timeline v2 secret manager.