MAPREDUCE-7000. Moving logging APIs over to slf4j in hadoop-mapreduce-client-nativetask. Contributed by Jinjiang Ling.

Akira Ajisaka 2017-12-07 16:27:08 +09:00
parent d4cae977a2
commit c2e8a5c229
20 changed files with 72 additions and 62 deletions
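
Every file in this commit follows the same mechanical pattern: the commons-logging import pair and Log field are swapped for their slf4j equivalents. A minimal before/after sketch of the pattern (Example is a hypothetical class, not one of the changed files):

    // Before: commons-logging
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class Example {
      private static final Log LOG = LogFactory.getLog(Example.class);
    }

    // After: slf4j
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      private static final Logger LOG = LoggerFactory.getLogger(Example.class);
    }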

HadoopPlatform.java

@@ -19,8 +19,6 @@ package org.apache.hadoop.mapred.nativetask;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.ByteWritable;
@@ -36,10 +34,13 @@ import org.apache.hadoop.io.VLongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.nativetask.serde.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 public class HadoopPlatform extends Platform {
-  private static final Log LOG = LogFactory.getLog(HadoopPlatform.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HadoopPlatform.class);
 
   public HadoopPlatform() throws IOException {
   }

NativeBatchProcessor.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred.nativetask;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
@@ -31,13 +29,16 @@ import org.apache.hadoop.mapred.nativetask.buffer.InputBuffer;
 import org.apache.hadoop.mapred.nativetask.buffer.OutputBuffer;
 import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
 import org.apache.hadoop.mapred.nativetask.util.ConfigUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * used to create channel, transfer data and command between Java and native
  */
 @InterfaceAudience.Private
 public class NativeBatchProcessor implements INativeHandler {
-  private static Log LOG = LogFactory.getLog(NativeBatchProcessor.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NativeBatchProcessor.class);
 
   private final String nativeHandlerName;
   private long nativeHandlerAddr;
@@ -128,7 +129,7 @@ public class NativeBatchProcessor implements INativeHandler {
       NativeRuntime.releaseNativeObject(nativeHandlerAddr);
       nativeHandlerAddr = 0;
     }
-    IOUtils.cleanup(LOG, in);
+    IOUtils.cleanupWithLogger(LOG, in);
     in = null;
   }
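
The last hunk above changes more than the logger declaration: helpers typed on the commons-logging Log no longer accept an slf4j Logger, so the call site moves to Hadoop's Logger-typed overload. A minimal sketch of the call (in stands for any Closeable the class owns):

    // closes the stream quietly, logging any secondary IOException through
    // the slf4j Logger rather than the older commons-logging overload
    IOUtils.cleanupWithLogger(LOG, in);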

NativeMapOutputCollectorDelegator.java

@@ -21,8 +21,6 @@ import java.io.IOException;
 
 import com.google.common.base.Charsets;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.mapred.InvalidJobConfException;
@@ -36,6 +34,8 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.QuickSort;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * native map output collector wrapped in Java interface
@@ -43,7 +43,8 @@ import org.apache.hadoop.util.QuickSort;
  */
 @InterfaceAudience.Private
 public class NativeMapOutputCollectorDelegator<K, V> implements MapOutputCollector<K, V> {
-  private static Log LOG = LogFactory.getLog(NativeMapOutputCollectorDelegator.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NativeMapOutputCollectorDelegator.class);
 
   private JobConf job;
   private NativeCollectorOnlyHandler<K, V> handler;

NativeRuntime.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred.nativetask;
 import java.io.IOException;
 
 import com.google.common.base.Charsets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataInputBuffer;
@@ -33,6 +31,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Task.TaskReporter;
 import org.apache.hadoop.mapred.nativetask.util.ConfigUtil;
 import org.apache.hadoop.util.VersionInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class stands for the native runtime It has three functions:
@@ -42,7 +42,8 @@ import org.apache.hadoop.util.VersionInfo;
  */
 @InterfaceAudience.Private
 public class NativeRuntime {
-  private static Log LOG = LogFactory.getLog(NativeRuntime.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NativeRuntime.class);
 
   private static boolean nativeLibraryLoaded = false;
   private static Configuration conf = new Configuration();

Platforms.java

@@ -20,13 +20,13 @@ package org.apache.hadoop.mapred.nativetask;
 import java.io.IOException;
 import java.util.ServiceLoader;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.nativetask.serde.INativeSerializer;
 import org.apache.hadoop.mapred.nativetask.serde.NativeSerialization;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
@@ -37,7 +37,7 @@ import org.apache.hadoop.mapred.nativetask.serde.NativeSerialization;
  */
 @InterfaceAudience.Private
 public class Platforms {
-  private static final Log LOG = LogFactory.getLog(Platforms.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Platforms.class);
   private static final ServiceLoader<Platform> platforms = ServiceLoader.load(Platform.class);
 
   public static void init(Configuration conf) throws IOException {

StatusReportChecker.java

@@ -19,11 +19,11 @@ package org.apache.hadoop.mapred.nativetask;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.Task.TaskReporter;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Will periodically check status from native and report to MR framework.
@@ -31,7 +31,8 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
  */
 class StatusReportChecker implements Runnable {
 
-  private static Log LOG = LogFactory.getLog(StatusReportChecker.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(StatusReportChecker.class);
   public static final int INTERVAL = 1000; // milliseconds
 
   private Thread checker;

BufferPuller.java

@@ -21,8 +21,6 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.mapred.RawKeyValueIterator;
@@ -33,6 +31,8 @@ import org.apache.hadoop.mapred.nativetask.buffer.BufferType;
 import org.apache.hadoop.mapred.nativetask.buffer.ByteBufferDataReader;
 import org.apache.hadoop.mapred.nativetask.buffer.InputBuffer;
 import org.apache.hadoop.util.Progress;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * actively signal a {@link BufferPullee} to load data into buffer and receive
@@ -40,7 +40,7 @@ import org.apache.hadoop.util.Progress;
  */
 @InterfaceAudience.Private
 public class BufferPuller implements RawKeyValueIterator, DataReceiver {
 
-  private static Log LOG = LogFactory.getLog(BufferPuller.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BufferPuller.class);
 
   public final static int KV_HEADER_LENGTH = Constants.SIZEOF_KV_LENGTH;

BufferPushee.java

@@ -21,8 +21,6 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.nativetask.Constants;
@@ -31,6 +29,8 @@ import org.apache.hadoop.mapred.nativetask.buffer.ByteBufferDataReader;
 import org.apache.hadoop.mapred.nativetask.buffer.InputBuffer;
 import org.apache.hadoop.mapred.nativetask.serde.KVSerializer;
 import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * collect data when signaled
@@ -38,7 +38,7 @@ import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
  */
 @InterfaceAudience.Private
 public class BufferPushee<OK, OV> implements Closeable {
 
-  private static Log LOG = LogFactory.getLog(BufferPushee.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BufferPushee.class);
 
   public final static int KV_HEADER_LENGTH = Constants.SIZEOF_KV_LENGTH;

BufferPusher.java

@@ -20,8 +20,6 @@ package org.apache.hadoop.mapred.nativetask.handlers;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.nativetask.NativeDataTarget;
@@ -29,6 +27,8 @@ import org.apache.hadoop.mapred.nativetask.buffer.ByteBufferDataWriter;
 import org.apache.hadoop.mapred.nativetask.serde.IKVSerializer;
 import org.apache.hadoop.mapred.nativetask.serde.KVSerializer;
 import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * actively push data into a buffer and signal a {@link BufferPushee} to collect it
@@ -36,7 +36,7 @@ import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
  */
 @InterfaceAudience.Private
 public class BufferPusher<K, V> implements OutputCollector<K, V> {
-  private static Log LOG = LogFactory.getLog(BufferPusher.class);
+  private static final Logger LOG = LoggerFactory.getLogger(BufferPusher.class);
 
   private final SizedWritable<K> tmpInputKey;
   private final SizedWritable<V> tmpInputValue;

CombinerHandler.java

@@ -19,8 +19,6 @@ package org.apache.hadoop.mapred.nativetask.handlers;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Task.CombinerRunner;
@@ -36,10 +34,13 @@ import org.apache.hadoop.mapred.nativetask.serde.SerializationFramework;
 import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskCounter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 class CombinerHandler<K, V> implements ICombineHandler, CommandDispatcher {
   public static final String NAME = "NativeTask.CombineHandler";
-  private static Log LOG = LogFactory.getLog(NativeCollectorOnlyHandler.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NativeCollectorOnlyHandler.class);
   public static final Command LOAD = new Command(1, "Load");
   public static final Command COMBINE = new Command(4, "Combine");
   public final CombinerRunner<K, V> combinerRunner;

NativeCollectorOnlyHandler.java

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred.nativetask.handlers;
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +35,8 @@ import org.apache.hadoop.mapred.nativetask.TaskContext;
 import org.apache.hadoop.mapred.nativetask.util.NativeTaskOutput;
 import org.apache.hadoop.mapred.nativetask.util.OutputUtil;
 import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Java Record Reader + Java Mapper + Native Collector
@@ -46,7 +46,8 @@ import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
 @InterfaceAudience.Private
 public class NativeCollectorOnlyHandler<K, V> implements CommandDispatcher, Closeable {
   public static final String NAME = "NativeTask.MCollectorOutputHandler";
-  private static Log LOG = LogFactory.getLog(NativeCollectorOnlyHandler.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NativeCollectorOnlyHandler.class);
   public static final Command GET_OUTPUT_PATH =
       new Command(100, "GET_OUTPUT_PATH");
   public static final Command GET_OUTPUT_INDEX_PATH =

KVSerializer.java

@@ -20,21 +20,20 @@ package org.apache.hadoop.mapred.nativetask.serde;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.Constants;
 import org.apache.hadoop.mapred.nativetask.buffer.DataInputStream;
 import org.apache.hadoop.mapred.nativetask.buffer.DataOutputStream;
 import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 public class KVSerializer<K, V> implements IKVSerializer {
 
-  private static final Log LOG = LogFactory.getLog(KVSerializer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(KVSerializer.class);
 
   public static final int KV_HEAD_LENGTH = Constants.SIZEOF_KV_LENGTH;

OutputUtil.java

@@ -20,15 +20,15 @@ package org.apache.hadoop.mapred.nativetask.util;
 
 import java.lang.reflect.Constructor;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 public class OutputUtil {
 
-  private static Log LOG = LogFactory.getLog(OutputUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OutputUtil.class);
   public static final String NATIVE_TASK_OUTPUT_MANAGER = "nativetask.output.manager";
 
   public static NativeTaskOutput createNativeTaskOutput(Configuration conf, String id) {

LargeKVCombinerTest.java

@@ -20,8 +20,6 @@ package org.apache.hadoop.mapred.nativetask.combinertest;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -41,11 +39,14 @@ import org.apache.hadoop.util.NativeCodeLoader;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
 public class LargeKVCombinerTest {
-  private static final Log LOG = LogFactory.getLog(LargeKVCombinerTest.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(LargeKVCombinerTest.class);
 
   @Before
   public void startUp() throws Exception {

WordCount.java

@@ -20,8 +20,6 @@ package org.apache.hadoop.mapred.nativetask.combinertest;
 import java.io.IOException;
 import java.util.StringTokenizer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
@@ -33,10 +31,12 @@ import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.GenericOptionsParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class WordCount {
 
-  private static Log LOG = LogFactory.getLog(WordCount.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WordCount.class);
 
   public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable> {

KVJob.java

@@ -23,8 +23,6 @@ import java.util.zip.CRC32;
 
 import com.google.common.primitives.Longs;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,11 +35,13 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.StopWatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class KVJob {
   public static final String INPUTPATH = "nativetask.kvtest.inputfile.path";
   public static final String OUTPUTPATH = "nativetask.kvtest.outputfile.path";
-  private static final Log LOG = LogFactory.getLog(KVJob.class);
+  private static final Logger LOG = LoggerFactory.getLogger(KVJob.class);
   Job job = null;
 
   public static class ValueMapper<KTYPE, VTYPE> extends Mapper<KTYPE, VTYPE, KTYPE, VTYPE> {

KVTest.java

@@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -41,13 +39,16 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
 
 @RunWith(Parameterized.class)
 public class KVTest {
-  private static final Log LOG = LogFactory.getLog(KVTest.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(KVTest.class);
 
   private static Configuration nativekvtestconf = ScenarioConfiguration.getNativeConfiguration();
   private static Configuration hadoopkvtestconf = ScenarioConfiguration.getNormalConfiguration();

LargeKVTest.java

@@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -38,9 +36,11 @@ import org.apache.hadoop.util.NativeCodeLoader;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class LargeKVTest {
-  private static final Log LOG = LogFactory.getLog(LargeKVTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LargeKVTest.class);
 
   @Before
   public void startUp() throws Exception {

TestInputFile.java

@@ -21,8 +21,6 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -40,10 +38,13 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.testutil.BytesFactory;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
 import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestInputFile {
-  private static Log LOG = LogFactory.getLog(TestInputFile.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestInputFile.class);
 
   public static class KVSizeScope {
     private static final int DefaultMinNum = 1;

EnforceNativeOutputCollectorDelegator.java

@@ -19,13 +19,14 @@ package org.apache.hadoop.mapred.nativetask.testutil;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.nativetask.NativeMapOutputCollectorDelegator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class EnforceNativeOutputCollectorDelegator<K, V>
     extends NativeMapOutputCollectorDelegator<K, V> {
-  private static final Log LOG = LogFactory.getLog(EnforceNativeOutputCollectorDelegator.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(EnforceNativeOutputCollectorDelegator.class);
 
   private boolean nativetaskloaded = false;
 
   @Override
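
A side benefit of the migration, not exercised by this commit: slf4j loggers support parameterized messages, so call sites can avoid the string concatenation that commons-logging forced callers to guard by hand. A hypothetical call site:

    // the message is only formatted if DEBUG is enabled for this logger
    LOG.debug("loaded platform {} in {} ms", name, elapsedMillis);

Most existing call sites compile unchanged because the info/warn/error/debug(String) signatures exist on both the commons-logging Log and slf4j Logger interfaces.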