MAPREDUCE-7001. Moving logging APIs over to slf4j in hadoop-mapreduce-client-shuffle. Contributed by Jinjiang Ling.

This commit is contained in:
Akira Ajisaka 2017-11-08 19:28:08 +09:00
parent ffee10b68e
commit e4c220ee4f
No known key found for this signature in database
GPG Key ID: C1EDBB9CA400FD50
5 changed files with 28 additions and 24 deletions

View File

@@ -22,11 +22,11 @@ import java.io.FileDescriptor;
import java.io.IOException;
import java.io.RandomAccessFile;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.ReadaheadPool;
import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_DONTNEED;
@@ -34,7 +34,8 @@ import org.jboss.netty.handler.stream.ChunkedFile;
public class FadvisedChunkedFile extends ChunkedFile {
private static final Log LOG = LogFactory.getLog(FadvisedChunkedFile.class);
private static final Logger LOG =
LoggerFactory.getLogger(FadvisedChunkedFile.class);
private final boolean manageOsCache;
private final int readaheadLength;

View File

@@ -25,11 +25,11 @@ import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.ReadaheadPool;
import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_DONTNEED;
@@ -39,7 +39,8 @@ import com.google.common.annotations.VisibleForTesting;
public class FadvisedFileRegion extends DefaultFileRegion {
private static final Log LOG = LogFactory.getLog(FadvisedFileRegion.class);
private static final Logger LOG =
LoggerFactory.getLogger(FadvisedFileRegion.class);
private final boolean manageOsCache;
private final int readaheadLength;

View File

@@ -54,8 +54,6 @@ import java.util.regex.Pattern;
import javax.crypto.SecretKey;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputByteBuffer;
@@ -134,6 +132,7 @@ import org.jboss.netty.util.CharsetUtil;
import org.jboss.netty.util.HashedWheelTimer;
import org.jboss.netty.util.Timer;
import org.eclipse.jetty.http.HttpHeader;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
@@ -148,9 +147,10 @@ import com.google.protobuf.ByteString;
public class ShuffleHandler extends AuxiliaryService {
private static final Log LOG = LogFactory.getLog(ShuffleHandler.class);
private static final Log AUDITLOG =
LogFactory.getLog(ShuffleHandler.class.getName()+".audit");
private static final org.slf4j.Logger LOG =
LoggerFactory.getLogger(ShuffleHandler.class);
private static final org.slf4j.Logger AUDITLOG =
LoggerFactory.getLogger(ShuffleHandler.class.getName()+".audit");
public static final String SHUFFLE_MANAGE_OS_CACHE = "mapreduce.shuffle.manage.os.cache";
public static final boolean DEFAULT_SHUFFLE_MANAGE_OS_CACHE = true;
@@ -775,7 +775,8 @@ public class ShuffleHandler extends AuxiliaryService {
}
private static class LevelDBLogger implements Logger {
private static final Log LOG = LogFactory.getLog(LevelDBLogger.class);
private static final org.slf4j.Logger LOG =
LoggerFactory.getLogger(LevelDBLogger.class);
@Override
public void log(String message) {

View File

@@ -25,18 +25,18 @@ import java.io.RandomAccessFile;
import java.nio.channels.WritableByteChannel;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.StringUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestFadvisedFileRegion {
private final int FILE_SIZE = 16*1024*1024;
private static final Log LOG =
LogFactory.getLog(TestFadvisedFileRegion.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestFadvisedFileRegion.class);
@Test(timeout = 100000)
public void testCustomShuffleTransfer() throws IOException {
@@ -66,7 +66,7 @@ public class TestFadvisedFileRegion {
try{
out.write(initBuff);
} finally {
IOUtils.cleanup(LOG, out);
IOUtils.cleanupWithLogger(LOG, out);
}
@@ -106,9 +106,9 @@ public class TestFadvisedFileRegion {
if (fileRegion != null) {
fileRegion.releaseExternalResources();
}
IOUtils.cleanup(LOG, target);
IOUtils.cleanup(LOG, targetFile);
IOUtils.cleanup(LOG, inputFile);
IOUtils.cleanupWithLogger(LOG, target);
IOUtils.cleanupWithLogger(LOG, targetFile);
IOUtils.cleanupWithLogger(LOG, inputFile);
}
//Read the target file and verify that copy is done correctly
@@ -123,7 +123,7 @@ public class TestFadvisedFileRegion {
Assert.assertEquals(initBuff[position+i], buff[i]);
}
} finally {
IOUtils.cleanup(LOG, in);
IOUtils.cleanupWithLogger(LOG, in);
}
//delete files and folders

View File

@@ -48,8 +48,6 @@ import java.util.Map;
import java.util.zip.CheckedOutputStream;
import java.util.zip.Checksum;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -99,10 +97,13 @@ import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.Mockito;
import org.eclipse.jetty.http.HttpHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestShuffleHandler {
static final long MiB = 1024 * 1024;
private static final Log LOG = LogFactory.getLog(TestShuffleHandler.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestShuffleHandler.class);
private static final File ABS_LOG_DIR = GenericTestUtils.getTestDir(
TestShuffleHandler.class.getSimpleName() + "LocDir");