HADOOP-15552. Move logging APIs over to slf4j in hadoop-tools - Part2. Contributed by Ian Pickering.

Akira Ajisaka 2018-08-16 00:31:59 +09:00
parent f2315f2e9c
commit 3e3963b035
91 changed files with 282 additions and 280 deletions
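Every file in this change follows the same mechanical substitution: the commons-logging Log/LogFactory pair is replaced by the SLF4J Logger/LoggerFactory pair, and a handful of call sites are adjusted to match. A minimal before/after sketch of the pattern, using a hypothetical class name MyTool rather than any of the files touched below:

// Before: commons-logging (the API being removed)
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class MyTool {
  private static final Log LOG = LogFactory.getLog(MyTool.class);
}

// After: SLF4J, as applied throughout this commit
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MyTool {
  private static final Logger LOG = LoggerFactory.getLogger(MyTool.class);
}

SLF4J's {} placeholders defer message formatting until the statement is actually logged, which several of the hunks below rely on.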

View File

@@ -69,7 +69,7 @@ public abstract class AbstractContractRenameTest extends
} else {
// at least one FS only returns false here, if that is the case
// warn but continue
- getLog().warn("Rename returned {} renaming a nonexistent file", renamed);
+ getLogger().warn("Rename returned {} renaming a nonexistent file", renamed);
assertFalse("Renaming a missing file returned true", renamed);
}
} catch (FileNotFoundException e) {
@@ -118,7 +118,7 @@ public abstract class AbstractContractRenameTest extends
if (renamed && !renameReturnsFalseOnRenameDestExists) {
//expected an exception
String destDirLS = generateAndLogErrorListing(srcFile, destFile);
- getLog().error("dest dir {}", destDirLS);
+ getLogger().error("dest dir {}", destDirLS);
fail("expected rename(" + srcFile + ", " + destFile + " ) to fail," +
" but got success and destination of " + destDirLS);
}

View File

@@ -132,7 +132,7 @@ public abstract class AbstractContractSeekTest extends AbstractFSContractTestBas
@Test
public void testSeekReadClosedFile() throws Throwable {
instream = getFileSystem().open(smallSeekFile);
- getLog().debug(
+ getLogger().debug(
"Stream is of type " + instream.getClass().getCanonicalName());
instream.close();
try {

View File

@@ -110,7 +110,7 @@ public abstract class AbstractFSContractTestBase extends Assert
* Get the log of the base class.
* @return a logger
*/
- public static Logger getLog() {
+ public static Logger getLogger() {
return LOG;
}
@@ -281,7 +281,7 @@ public abstract class AbstractFSContractTestBase extends Assert
* @param e exception raised.
*/
protected void handleExpectedException(Exception e) {
- getLog().debug("expected :{}" ,e, e);
+ getLogger().debug("expected :{}" ,e, e);
}
/**
@@ -366,7 +366,7 @@ public abstract class AbstractFSContractTestBase extends Assert
protected String generateAndLogErrorListing(Path src, Path dst) throws
IOException {
FileSystem fs = getFileSystem();
- getLog().error(
+ getLogger().error(
"src dir " + ContractTestUtils.ls(fs, src.getParent()));
String destDirLS = ContractTestUtils.ls(fs, dst.getParent());
if (fs.isDirectory(dst)) {
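The handleExpectedException hunk relies on an SLF4J convention worth noting: when the final argument of a logging call is a Throwable and no placeholder remains for it, SLF4J logs it as the exception, stack trace included. A small illustration (the logger name and exception are made up, not code from this commit):

// Hypothetical example of SLF4J's trailing-Throwable handling.
Logger log = LoggerFactory.getLogger("example");
Exception e = new java.io.FileNotFoundException("missing");
// The first e fills the {} placeholder (via toString()); the trailing e is
// treated as the Throwable, so its stack trace is emitted at DEBUG level.
log.debug("expected :{}", e, e);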

View File

@@ -177,4 +177,4 @@ public class TestNameNodeMetadataConsistency {
}
}, SCAN_WAIT * 1000, 60000);
}
}

View File

@@ -25,8 +25,8 @@ import java.util.Queue;
import java.util.concurrent.ExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.fs.FSInputStream;
@@ -40,7 +40,7 @@ import static org.apache.hadoop.fs.aliyun.oss.Constants.*;
* stream.
*/
public class AliyunOSSInputStream extends FSInputStream {
- public static final Log LOG = LogFactory.getLog(AliyunOSSInputStream.class);
+ public static final Logger LOG = LoggerFactory.getLogger(AliyunOSSInputStream.class);
private final long downloadPartSize;
private AliyunOSSFileSystemStore store;
private final String key;

View File

@@ -27,8 +27,8 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.output.FileWriterWithEncoding;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -70,7 +70,7 @@ import java.util.Set;
* {@link HadoopArchiveLogsRunner}.
*/
public class HadoopArchiveLogs implements Tool {
- private static final Log LOG = LogFactory.getLog(HadoopArchiveLogs.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HadoopArchiveLogs.class);
private static final String HELP_OPTION = "help";
private static final String MAX_ELIGIBLE_APPS_OPTION = "maxEligibleApps";

View File

@@ -25,8 +25,8 @@ import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -47,8 +47,8 @@ import java.security.PrivilegedExceptionAction;
* tool via the Distributed Shell. It's not meant to be run directly.
*/
public class HadoopArchiveLogsRunner implements Tool {
- private static final Log LOG =
-     LogFactory.getLog(HadoopArchiveLogsRunner.class);
+ private static final Logger LOG =
+     LoggerFactory.getLogger(HadoopArchiveLogsRunner.class);
private static final String APP_ID_OPTION = "appId";
private static final String USER_OPTION = "user";

View File

@@ -37,8 +37,8 @@ import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.Parser;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -82,7 +82,7 @@ import com.google.common.base.Charsets;
*/
public class HadoopArchives implements Tool {
public static final int VERSION = 3;
- private static final Log LOG = LogFactory.getLog(HadoopArchives.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HadoopArchives.class);
private static final String NAME = "har";
private static final String ARCHIVE_NAME = "archiveName";

View File

@@ -41,7 +41,7 @@ public class ITestS3AContractGetFileStatus
@Override
public void teardown() throws Exception {
- getLog().info("FS details {}", getFileSystem());
+ getLogger().info("FS details {}", getFileSystem());
super.teardown();
}

View File

@@ -41,7 +41,7 @@ public class ITestS3AContractGetFileStatusV1List
@Override
public void teardown() throws Exception {
- getLog().info("FS details {}", getFileSystem());
+ getLogger().info("FS details {}", getFileSystem());
super.teardown();
}

View File

@@ -23,8 +23,8 @@ import java.util.TreeMap;
import java.util.Map.Entry;
import java.util.Iterator;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.Reducer;
@@ -36,7 +36,7 @@ import org.apache.hadoop.mapred.Reducer;
*/
public abstract class JobBase implements Mapper, Reducer {
- public static final Log LOG = LogFactory.getLog("datajoin.job");
+ public static final Logger LOG = LoggerFactory.getLogger("datajoin.job");
private SortedMap<Object, Long> longCounters = null;

View File

@@ -27,8 +27,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.tools.util.DistCpUtils;
import org.apache.hadoop.security.Credentials;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Constructor;
@@ -48,7 +48,7 @@ import com.google.common.collect.Sets;
public abstract class CopyListing extends Configured {
private Credentials credentials;
- static final Log LOG = LogFactory.getLog(DistCp.class);
+ static final Logger LOG = LoggerFactory.getLogger(DistCp.class);
/**
* Build listing function creates the input listing that distcp uses to
* perform the copy.

View File

@@ -22,8 +22,8 @@ import java.io.IOException;
import java.util.Random;
import com.google.common.base.Preconditions;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -64,7 +64,7 @@ public class DistCp extends Configured implements Tool {
*/
static final int SHUTDOWN_HOOK_PRIORITY = 30;
- static final Log LOG = LogFactory.getLog(DistCp.class);
+ static final Logger LOG = LoggerFactory.getLogger(DistCp.class);
@VisibleForTesting
DistCpContext context;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +35,7 @@ import java.util.ArrayList;
* listing-file by "globbing" all specified source paths (wild-cards and all.)
*/
public class GlobbedCopyListing extends CopyListing {
- private static final Log LOG = LogFactory.getLog(GlobbedCopyListing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GlobbedCopyListing.class);
private final CopyListing simpleListing;
/**

View File

@@ -29,8 +29,8 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Preconditions;
@@ -41,7 +41,7 @@ import com.google.common.base.Preconditions;
*/
public class OptionsParser {
- static final Log LOG = LogFactory.getLog(OptionsParser.class);
+ static final Logger LOG = LoggerFactory.getLogger(OptionsParser.class);
private static final Options cliOptions = new Options();

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
@@ -43,7 +43,7 @@ import com.google.common.annotations.VisibleForTesting;
*/
public class RegexCopyFilter extends CopyFilter {
- private static final Log LOG = LogFactory.getLog(RegexCopyFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RegexCopyFilter.class);
private File filtersFile;
private List<Pattern> filters;
@@ -77,7 +77,7 @@ public class RegexCopyFilter extends CopyFilter {
LOG.error("An error occurred while attempting to read from " +
filtersFile);
} finally {
- IOUtils.cleanup(LOG, reader);
+ IOUtils.cleanupWithLogger(LOG, reader);
}
}
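The finally blocks in this and the following DistCp files also swap IOUtils.cleanup, whose first parameter is a commons-logging Log, for IOUtils.cleanupWithLogger, which accepts the SLF4J Logger fields introduced above. A hedged sketch of the call, assuming the SLF4J and org.apache.hadoop.io.IOUtils imports already shown in these hunks (the helper method and reader variable are illustrative only):

// Sketch only: closing streams quietly after the SLF4J migration.
void closeQuietly(Logger log, java.io.Reader reader) {
  // Closes each Closeable and logs (rather than throws) any IOException.
  IOUtils.cleanupWithLogger(log, reader);
}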

View File

@@ -20,8 +20,8 @@ package org.apache.hadoop.tools;
import com.google.common.collect.Lists;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
@@ -60,7 +60,7 @@ import static org.apache.hadoop.tools.DistCpConstants
* Note: The SimpleCopyListing doesn't handle wild-cards in the input-paths.
*/
public class SimpleCopyListing extends CopyListing {
- private static final Log LOG = LogFactory.getLog(SimpleCopyListing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleCopyListing.class);
public static final int DEFAULT_FILE_STATUS_SIZE = 1000;
public static final boolean DEFAULT_RANDOMIZE_FILE_LISTING = true;
@@ -309,7 +309,7 @@ public class SimpleCopyListing extends CopyListing {
fileListWriter.close();
fileListWriter = null;
} finally {
- IOUtils.cleanup(LOG, fileListWriter);
+ IOUtils.cleanupWithLogger(LOG, fileListWriter);
}
}
@@ -402,7 +402,7 @@ public class SimpleCopyListing extends CopyListing {
LOG.info("Build file listing completed.");
fileListWriter = null;
} finally {
- IOUtils.cleanup(LOG, fileListWriter);
+ IOUtils.cleanupWithLogger(LOG, fileListWriter);
}
}

View File

@@ -23,8 +23,8 @@ import java.io.IOException;
import java.util.EnumSet;
import org.apache.commons.lang3.exception.ExceptionUtils;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
@@ -74,7 +74,7 @@ public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text>
OVERWRITE, // Overwrite the whole file
}
- private static Log LOG = LogFactory.getLog(CopyMapper.class);
+ private static Logger LOG = LoggerFactory.getLogger(CopyMapper.class);
private Configuration conf;

View File

@@ -23,8 +23,8 @@ import java.io.IOException;
import java.io.OutputStream;
import java.util.EnumSet;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -53,7 +53,7 @@ import com.google.common.annotations.VisibleForTesting;
*/
public class RetriableFileCopyCommand extends RetriableCommand {
- private static Log LOG = LogFactory.getLog(RetriableFileCopyCommand.class);
+ private static Logger LOG = LoggerFactory.getLogger(RetriableFileCopyCommand.class);
private boolean skipCrc = false;
private FileAction action;
@@ -297,7 +297,7 @@ public class RetriableFileCopyCommand extends RetriableCommand {
outStream.close();
outStream = null;
} finally {
- IOUtils.cleanup(LOG, outStream, inStream);
+ IOUtils.cleanupWithLogger(LOG, outStream, inStream);
}
return totalBytesRead;
}

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.mapred;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.IOUtils;
@@ -50,8 +50,8 @@ import java.util.ArrayList;
*/
public class UniformSizeInputFormat
extends InputFormat<Text, CopyListingFileStatus> {
- private static final Log LOG
-     = LogFactory.getLog(UniformSizeInputFormat.class);
+ private static final Logger LOG
+     = LoggerFactory.getLogger(UniformSizeInputFormat.class);
/**
* Implementation of InputFormat::getSplits(). Returns a list of InputSplits,

View File

@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.tools.mapred.lib;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
@@ -42,7 +42,7 @@ import java.io.IOException;
* consumed.
*/
class DynamicInputChunk<K, V> {
- private static Log LOG = LogFactory.getLog(DynamicInputChunk.class);
+ private static Logger LOG = LoggerFactory.getLogger(DynamicInputChunk.class);
private Path chunkFilePath;
private SequenceFileRecordReader<K, V> reader;
private SequenceFile.Writer writer;
@@ -78,7 +78,7 @@ class DynamicInputChunk<K, V> {
* Closes streams opened to the chunk-file.
*/
public void close() {
- IOUtils.cleanup(LOG, reader, writer);
+ IOUtils.cleanupWithLogger(LOG, reader, writer);
}
/**

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.mapred.lib;
- import org.apache.commons.logging.LogFactory;
- import org.apache.commons.logging.Log;
+ import org.slf4j.LoggerFactory;
+ import org.slf4j.Logger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
@@ -34,7 +34,7 @@ import java.io.IOException;
*/
class DynamicInputChunkContext<K, V> {
- private static Log LOG = LogFactory.getLog(DynamicInputChunkContext.class);
+ private static Logger LOG = LoggerFactory.getLogger(DynamicInputChunkContext.class);
private Configuration configuration;
private Path chunkRootPath = null;
private String chunkFilePrefix;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.mapred.lib;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.tools.DistCpConstants;
@@ -49,7 +49,7 @@ import java.io.IOException;
* performance characteristics.
*/
public class DynamicInputFormat<K, V> extends InputFormat<K, V> {
- private static final Log LOG = LogFactory.getLog(DynamicInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DynamicInputFormat.class);
private static final String CONF_LABEL_LISTING_SPLIT_RATIO
= "mapred.listing.split.ratio";

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.mapred.lib;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.tools.util.DistCpUtils;
import org.apache.hadoop.tools.DistCpConstants;
import org.apache.hadoop.mapreduce.*;
@@ -37,7 +37,7 @@ import java.util.concurrent.TimeUnit;
* transparently.
*/
public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
- private static final Log LOG = LogFactory.getLog(DynamicRecordReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DynamicRecordReader.class);
private TaskAttemptContext taskAttemptContext;
private Configuration configuration;
private DynamicInputChunk<K, V> chunk;

View File

@@ -20,8 +20,8 @@ package org.apache.hadoop.tools.util;
import com.google.common.collect.Maps;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileChecksum;
@@ -56,7 +56,7 @@ import java.util.Map.Entry;
*/
public class DistCpUtils {
- private static final Log LOG = LogFactory.getLog(DistCpUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DistCpUtils.class);
/**
* Retrieves size of the file at the specified path.

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.util;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -34,7 +34,7 @@ import java.util.concurrent.atomic.AtomicInteger;
* WorkReport{@literal <R>} to the outputQueue.
*/
public class ProducerConsumer<T, R> {
- private Log LOG = LogFactory.getLog(ProducerConsumer.class);
+ private Logger LOG = LoggerFactory.getLogger(ProducerConsumer.class);
private LinkedBlockingQueue<WorkRequest<T>> inputQueue;
private LinkedBlockingQueue<WorkReport<R>> outputQueue;
private ExecutorService executor;

View File

@@ -19,8 +19,8 @@
package org.apache.hadoop.tools.util;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
import org.apache.hadoop.io.retry.RetryPolicies;
@@ -35,7 +35,7 @@ import java.util.concurrent.TimeUnit;
*/
public abstract class RetriableCommand {
- private static Log LOG = LogFactory.getLog(RetriableCommand.class);
+ private static Logger LOG = LoggerFactory.getLogger(RetriableCommand.class);
private static final long DELAY_MILLISECONDS = 500;
private static final int MAX_RETRIES = 3;

View File

@@ -20,8 +20,8 @@ package org.apache.hadoop.tools;
import static org.mockito.Mockito.*;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.conf.Configuration;
@@ -51,7 +51,7 @@ import java.util.Random;
@RunWith(value = Parameterized.class)
public class TestCopyListing extends SimpleCopyListing {
- private static final Log LOG = LogFactory.getLog(TestCopyListing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCopyListing.class);
private static final Credentials CREDENTIALS = new Credentials();

View File

@@ -31,8 +31,8 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -57,8 +57,8 @@ import org.junit.rules.Timeout;
*/
public class TestDistCpSystem {
- private static final Log LOG =
-     LogFactory.getLog(TestDistCpSystem.class);
+ private static final Logger LOG =
+     LoggerFactory.getLogger(TestDistCpSystem.class);
@Rule
public Timeout globalTimeout = new Timeout(30000);

View File

@@ -18,9 +18,9 @@
package org.apache.hadoop.tools;
- import org.apache.commons.logging.Log;
+ import org.slf4j.Logger;
import org.apache.hadoop.fs.viewfs.*;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,7 +37,7 @@ import java.net.URI;
import java.net.URISyntaxException;
public class TestDistCpViewFs {
- private static final Log LOG = LogFactory.getLog(TestDistCpViewFs.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDistCpViewFs.class);
private static FileSystem fs;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,7 +37,7 @@ import java.security.Permission;
public class TestExternalCall {
- private static final Log LOG = LogFactory.getLog(TestExternalCall.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestExternalCall.class);
private static FileSystem fs;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -40,7 +40,7 @@ import java.util.HashMap;
import java.util.Map;
public class TestFileBasedCopyListing {
- private static final Log LOG = LogFactory.getLog(TestFileBasedCopyListing.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestFileBasedCopyListing.class);
private static final Credentials CREDENTIALS = new Credentials();

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -43,7 +43,7 @@ import java.util.List;
@RunWith(value = Parameterized.class)
public class TestIntegration {
- private static final Log LOG = LogFactory.getLog(TestIntegration.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestIntegration.class);
private static FileSystem fs;

View File

@@ -523,7 +523,7 @@ public abstract class AbstractContractDistCpTest
int fileSizeKb = conf.getInt(SCALE_TEST_DISTCP_FILE_SIZE_KB,
DEFAULT_DISTCP_SIZE_KB);
int fileSizeMb = fileSizeKb / 1024;
- getLog().info("{} with file size {}", testName.getMethodName(), fileSizeMb);
+ getLogger().info("{} with file size {}", testName.getMethodName(), fileSizeMb);
byte[] data1 = dataset((fileSizeMb + 1) * MB, 33, 43);
createFile(srcFS, largeFile1, true, data1);
byte[] data2 = dataset((fileSizeMb + 2) * MB, 43, 53);

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.mapred;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -47,7 +47,7 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
import static org.apache.hadoop.tools.util.TestDistCpUtils.*;
public class TestCopyCommitter {
- private static final Log LOG = LogFactory.getLog(TestCopyCommitter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCopyCommitter.class);
private static final Random rand = new Random();

View File

@@ -27,8 +27,8 @@ import java.util.EnumSet;
import java.util.List;
import java.util.Random;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -62,7 +62,7 @@ import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
public class TestCopyMapper {
- private static final Log LOG = LogFactory.getLog(TestCopyMapper.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCopyMapper.class);
private static List<Path> pathList = new ArrayList<Path>();
private static int nFiles = 0;
private static final int DEFAULT_FILE_SIZE = 1024;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.mapred;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
@@ -32,7 +32,7 @@ import org.junit.Assert;
import java.io.IOException;
public class TestCopyOutputFormat {
- private static final Log LOG = LogFactory.getLog(TestCopyOutputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestCopyOutputFormat.class);
@Test
public void testSetCommitDirectory() {

View File

@@ -21,8 +21,8 @@ package org.apache.hadoop.tools.mapred.lib;
import org.apache.hadoop.tools.DistCpConstants;
import org.apache.hadoop.tools.DistCpContext;
import org.junit.Assert;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -46,7 +46,7 @@ import java.util.ArrayList;
import java.util.List;
public class TestDynamicInputFormat {
- private static final Log LOG = LogFactory.getLog(TestDynamicInputFormat.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDynamicInputFormat.class);
private static MiniDFSCluster cluster;
private static final int N_FILES = 1000;
private static final int NUM_SPLITS = 7;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.util;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -50,7 +50,7 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class TestDistCpUtils {
- private static final Log LOG = LogFactory.getLog(TestDistCpUtils.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestDistCpUtils.class);
private static final Configuration config = new Configuration();
private static MiniDFSCluster cluster;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.tools.util;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.IOUtils;
import org.junit.Assert;
import org.junit.Test;
@@ -27,7 +27,7 @@ import org.junit.Test;
import java.io.*;
public class TestThrottledInputStream {
- private static final Log LOG = LogFactory.getLog(TestThrottledInputStream.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestThrottledInputStream.class);
private static final int BUFF_SIZE = 1024;
private enum CB {ONE_C, BUFFER, BUFF_OFFSET}
@@ -89,7 +89,7 @@ public class TestThrottledInputStream {
copyByteByByte(in, out);
}
- LOG.info(in);
+ LOG.info("{}", in);
bandwidth = in.getBytesPerSec();
Assert.assertEquals(in.getTotalBytesRead(), tmpFile.length());
Assert.assertTrue(in.getBytesPerSec() > maxBandwidth / (factor * 1.2));
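The last hunk above is one of the few places where a call site had to change shape rather than just its imports: commons-logging's Log.info(Object) accepts an arbitrary object, while SLF4J's Logger.info takes a String message, so the stream is passed through a placeholder instead. Illustrative only (the variable in stands for the throttled stream under test):

// commons-logging allowed the object itself as the message:
//   LOG.info(in);
// With SLF4J the message is a format String; in.toString() is invoked only
// if INFO is enabled, via the {} placeholder:
LOG.info("{}", in);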

View File

@@ -27,8 +27,8 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -40,7 +40,7 @@ import org.apache.hadoop.mapred.JobConf;
* An abstract class for distributed tool for file related operations.
*/
abstract class DistTool implements org.apache.hadoop.util.Tool {
- protected static final Log LOG = LogFactory.getLog(DistTool.class);
+ protected static final Logger LOG = LoggerFactory.getLogger(DistTool.class);
protected JobConf jobconf;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.mapred.gridmix;
import org.apache.commons.lang3.time.FastDateFormat;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +40,7 @@ import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
* addresses are also recorded in the summary.
*/
class ClusterSummarizer implements StatListener<ClusterStats> {
- static final Log LOG = LogFactory.getLog(ClusterSummarizer.class);
+ static final Logger LOG = LoggerFactory.getLogger(ClusterSummarizer.class);
private int numBlacklistedTrackers;
private int numActiveTrackers;

View File

@@ -25,8 +25,8 @@ import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -58,7 +58,7 @@ import org.apache.hadoop.util.StringUtils;
* This is a utility class for all the compression related modules.
*/
class CompressionEmulationUtil {
- static final Log LOG = LogFactory.getLog(CompressionEmulationUtil.class);
+ static final Logger LOG = LoggerFactory.getLogger(CompressionEmulationUtil.class);
/**
* Enable compression usage in GridMix runs.

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.mapred.gridmix;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -83,8 +83,8 @@ import java.util.Map;
@InterfaceAudience.Private
@InterfaceStability.Evolving
class DistributedCacheEmulator {
- private static final Log LOG =
-     LogFactory.getLog(DistributedCacheEmulator.class);
+ private static final Logger LOG =
+     LoggerFactory.getLogger(DistributedCacheEmulator.class);
static final long AVG_BYTES_PER_MAP = 128 * 1024 * 1024L;// 128MB

View File

@@ -22,14 +22,14 @@ import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
/**
* Echos the UGI offered.
*/
public class EchoUserResolver implements UserResolver {
- public static final Log LOG = LogFactory.getLog(Gridmix.class);
+ public static final Logger LOG = LoggerFactory.getLogger(Gridmix.class);
public EchoUserResolver() {
LOG.info(" Current user resolver is EchoUserResolver ");

View File

@@ -20,8 +20,8 @@ package org.apache.hadoop.mapred.gridmix;
import java.io.IOException;
import org.apache.commons.lang3.time.FastDateFormat;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -47,7 +47,7 @@ import org.apache.hadoop.util.StringUtils;
* </ul>
*/
class ExecutionSummarizer implements StatListener<JobStats> {
- static final Log LOG = LogFactory.getLog(ExecutionSummarizer.class);
+ static final Logger LOG = LoggerFactory.getLogger(ExecutionSummarizer.class);
private static final FastDateFormat UTIL = FastDateFormat.getInstance();
private int numJobsInInputTrace;

View File

@@ -37,8 +37,8 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapred.gridmix.RandomAlgorithms.Selector;
/**
@@ -47,7 +47,7 @@ import org.apache.hadoop.mapred.gridmix.RandomAlgorithms.Selector;
*/
class FilePool {
- public static final Log LOG = LogFactory.getLog(FilePool.class);
+ public static final Logger LOG = LoggerFactory.getLogger(FilePool.class);
/**
* The minimum file size added to the pool. Default 128MiB.

View File

@@ -49,8 +49,8 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.tools.rumen.JobStory;
import static org.apache.hadoop.tools.rumen.datatypes.util.MapReduceJobPropertiesParser.extractMaxHeapOpts;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
/**
* Synthetic job generated from a trace description.
@@ -59,7 +59,7 @@ abstract class GridmixJob implements Callable<Job>, Delayed {
// Gridmix job name format is GRIDMIX<6 digit sequence number>
public static final String JOB_NAME_PREFIX = "GRIDMIX";
- public static final Log LOG = LogFactory.getLog(GridmixJob.class);
+ public static final Logger LOG = LoggerFactory.getLogger(GridmixJob.class);
private static final ThreadLocal<Formatter> nameFormat =
new ThreadLocal<Formatter>() {

View File

@@ -33,15 +33,15 @@ import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
/**
* Given a {@link #FilePool}, obtain a set of files capable of satisfying
* a full set of splits, then iterate over each source to fill the request.
*/
class InputStriper {
- public static final Log LOG = LogFactory.getLog(InputStriper.class);
+ public static final Logger LOG = LoggerFactory.getLogger(InputStriper.class);
int idx;
long currentStart;
FileStatus current;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.mapred.gridmix;
import org.apache.commons.lang3.StringUtils;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
@@ -52,7 +52,7 @@ import java.util.concurrent.atomic.AtomicInteger;
*/
abstract class JobFactory<T> implements Gridmix.Component<Void>,StatListener<T> {
- public static final Log LOG = LogFactory.getLog(JobFactory.class);
+ public static final Logger LOG = LoggerFactory.getLogger(JobFactory.class);
protected final Path scratch;
protected final float rateFactor;

View File

@@ -29,8 +29,8 @@ import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
import org.apache.hadoop.mapreduce.Job;
@@ -54,7 +54,7 @@ import org.apache.hadoop.mapreduce.JobStatus;
*/
class JobMonitor implements Gridmix.Component<JobStats> {
- public static final Log LOG = LogFactory.getLog(JobMonitor.class);
+ public static final Logger LOG = LoggerFactory.getLogger(JobMonitor.class);
private final Queue<JobStats> mJobs;
private ExecutorService executor;

View File

@@ -26,8 +26,8 @@ import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
/**
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
*/
class JobSubmitter implements Gridmix.Component<GridmixJob> {
- public static final Log LOG = LogFactory.getLog(JobSubmitter.class);
+ public static final Logger LOG = LoggerFactory.getLogger(JobSubmitter.class);
private final Semaphore sem;
private final Statistics statistics;

View File

@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.mapred.gridmix;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
@@ -54,7 +54,7 @@ import java.util.Random;
*/
class LoadJob extends GridmixJob {
- public static final Log LOG = LogFactory.getLog(LoadJob.class);
+ public static final Logger LOG = LoggerFactory.getLogger(LoadJob.class);
public LoadJob(final Configuration conf, long submissionMillis,
final JobStory jobdesc, Path outRoot, UserGroupInformation ugi,

View File

@@ -22,15 +22,15 @@ import java.util.List;
import java.util.Random;
import org.apache.commons.lang3.RandomStringUtils;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
/**
* A random text generator. The words are simply sequences of alphabets.
*/
class RandomTextDataGenerator {
- static final Log LOG = LogFactory.getLog(RandomTextDataGenerator.class);
+ static final Logger LOG = LoggerFactory.getLogger(RandomTextDataGenerator.class);
/**
* Configuration key for random text data generator's list size.

View File

@ -23,15 +23,15 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.tools.rumen.JobStory; import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.tools.rumen.JobStoryProducer; import org.apache.hadoop.tools.rumen.JobStoryProducer;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
class ReplayJobFactory extends JobFactory<Statistics.ClusterStats> { class ReplayJobFactory extends JobFactory<Statistics.ClusterStats> {
public static final Log LOG = LogFactory.getLog(ReplayJobFactory.class); public static final Logger LOG = LoggerFactory.getLogger(ReplayJobFactory.class);
/** /**
* Creating a new instance does not start the thread. * Creating a new instance does not start the thread.

View File

@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.mapred.gridmix; package org.apache.hadoop.mapred.gridmix;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@ -34,7 +34,7 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
public class RoundRobinUserResolver implements UserResolver { public class RoundRobinUserResolver implements UserResolver {
public static final Log LOG = LogFactory.getLog(RoundRobinUserResolver.class); public static final Logger LOG = LoggerFactory.getLogger(RoundRobinUserResolver.class);
private int uidx = 0; private int uidx = 0;
private List<UserGroupInformation> users = Collections.emptyList(); private List<UserGroupInformation> users = Collections.emptyList();

View File

@ -24,8 +24,8 @@ import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.tools.rumen.JobStoryProducer; import org.apache.hadoop.tools.rumen.JobStoryProducer;
import org.apache.hadoop.mapred.gridmix.Statistics.JobStats; import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
@ -33,7 +33,7 @@ import java.util.concurrent.locks.Condition;
public class SerialJobFactory extends JobFactory<JobStats> { public class SerialJobFactory extends JobFactory<JobStats> {
public static final Log LOG = LogFactory.getLog(SerialJobFactory.class); public static final Logger LOG = LoggerFactory.getLogger(SerialJobFactory.class);
private final Condition jobCompleted = lock.newCondition(); private final Condition jobCompleted = lock.newCondition();
/** /**

View File

@ -50,12 +50,12 @@ import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.tools.rumen.ReduceTaskAttemptInfo; import org.apache.hadoop.tools.rumen.ReduceTaskAttemptInfo;
import org.apache.hadoop.tools.rumen.TaskAttemptInfo; import org.apache.hadoop.tools.rumen.TaskAttemptInfo;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Time;
public class SleepJob extends GridmixJob { public class SleepJob extends GridmixJob {
public static final Log LOG = LogFactory.getLog(SleepJob.class); public static final Logger LOG = LoggerFactory.getLogger(SleepJob.class);
private static final ThreadLocal <Random> rand = private static final ThreadLocal <Random> rand =
new ThreadLocal <Random> () { new ThreadLocal <Random> () {
@Override protected Random initialValue() { @Override protected Random initialValue() {

View File

@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.mapred.gridmix; package org.apache.hadoop.mapred.gridmix;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.ClusterStatus; import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobClient;
@ -50,7 +50,7 @@ import java.util.concurrent.locks.ReentrantLock;
* notified either on every job completion event or some fixed time interval. * notified either on every job completion event or some fixed time interval.
*/ */
public class Statistics implements Component<Statistics.JobStats> { public class Statistics implements Component<Statistics.JobStats> {
public static final Log LOG = LogFactory.getLog(Statistics.class); public static final Logger LOG = LoggerFactory.getLogger(Statistics.class);
private final StatCollector statistics = new StatCollector(); private final StatCollector statistics = new StatCollector();
private JobClient cluster; private JobClient cluster;

View File

@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.mapred.gridmix; package org.apache.hadoop.mapred.gridmix;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
@ -38,7 +38,7 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
public class StressJobFactory extends JobFactory<Statistics.ClusterStats> { public class StressJobFactory extends JobFactory<Statistics.ClusterStats> {
public static final Log LOG = LogFactory.getLog(StressJobFactory.class); public static final Logger LOG = LoggerFactory.getLogger(StressJobFactory.class);
private final LoadStatus loadStatus = new LoadStatus(); private final LoadStatus loadStatus = new LoadStatus();
/** /**

View File

@ -21,14 +21,14 @@ import java.io.IOException;
import java.net.URI; import java.net.URI;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
/** /**
* Resolves all UGIs to the submitting user. * Resolves all UGIs to the submitting user.
*/ */
public class SubmitterUserResolver implements UserResolver { public class SubmitterUserResolver implements UserResolver {
public static final Log LOG = LogFactory.getLog(SubmitterUserResolver.class); public static final Logger LOG = LoggerFactory.getLogger(SubmitterUserResolver.class);
private UserGroupInformation ugi = null; private UserGroupInformation ugi = null;

View File

@ -33,8 +33,8 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@ -57,7 +57,7 @@ import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
public class CommonJobTest { public class CommonJobTest {
public static final Log LOG = LogFactory.getLog(Gridmix.class); public static final Logger LOG = LoggerFactory.getLogger(Gridmix.class);
protected static int NJOBS = 2; protected static int NJOBS = 2;
protected static final long GENDATA = 1; // in megabytes protected static final long GENDATA = 1; // in megabytes

View File

@ -40,12 +40,12 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.io.IOException; import java.io.IOException;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
public class DebugJobProducer implements JobStoryProducer { public class DebugJobProducer implements JobStoryProducer {
public static final Log LOG = LogFactory.getLog(DebugJobProducer.class); public static final Logger LOG = LoggerFactory.getLogger(DebugJobProducer.class);
final ArrayList<JobStory> submitted; final ArrayList<JobStory> submitted;
private final Configuration conf; private final Configuration conf;
private final AtomicInteger numJobs; private final AtomicInteger numJobs;

View File

@ -16,8 +16,8 @@
*/ */
package org.apache.hadoop.mapred.gridmix; package org.apache.hadoop.mapred.gridmix;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
@ -35,7 +35,7 @@ import java.io.IOException;
* This is a test class. * This is a test class.
*/ */
public class GridmixTestUtils { public class GridmixTestUtils {
private static final Log LOG = LogFactory.getLog(GridmixTestUtils.class); private static final Logger LOG = LoggerFactory.getLogger(GridmixTestUtils.class);
static final Path DEST = new Path("/gridmix"); static final Path DEST = new Path("/gridmix");
static FileSystem dfs = null; static FileSystem dfs = null;
static MiniDFSCluster dfsCluster = null; static MiniDFSCluster dfsCluster = null;

View File

@ -28,8 +28,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.BlockLocation;
@ -40,7 +40,7 @@ import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
public class TestFilePool { public class TestFilePool {
static final Log LOG = LogFactory.getLog(TestFileQueue.class); static final Logger LOG = LoggerFactory.getLogger(TestFileQueue.class);
static final int NFILES = 26; static final int NFILES = 26;
static final Path base = getBaseDir(); static final Path base = getBaseDir();

View File

@ -26,8 +26,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -36,7 +36,7 @@ import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
public class TestFileQueue { public class TestFileQueue {
static final Log LOG = LogFactory.getLog(TestFileQueue.class); static final Logger LOG = LoggerFactory.getLogger(TestFileQueue.class);
static final int NFILES = 4; static final int NFILES = 4;
static final int BLOCK = 256; static final int BLOCK = 256;
static final Path[] paths = new Path[NFILES]; static final Path[] paths = new Path[NFILES];

View File

@ -30,8 +30,8 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.CustomOutputCommitter; import org.apache.hadoop.CustomOutputCommitter;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
@ -85,7 +85,7 @@ import static org.mockito.Mockito.*;
import static org.junit.Assert.*; import static org.junit.Assert.*;
public class TestGridMixClasses { public class TestGridMixClasses {
private static final Log LOG = LogFactory.getLog(TestGridMixClasses.class); private static final Logger LOG = LoggerFactory.getLogger(TestGridMixClasses.class);
/* /*
* simple test LoadSplit (getters,copy, write, read...) * simple test LoadSplit (getters,copy, write, read...)

View File

@ -23,8 +23,8 @@ import java.util.Random;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.DataOutputBuffer;
@ -32,7 +32,7 @@ import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.WritableUtils;
public class TestGridmixRecord { public class TestGridmixRecord {
private static final Log LOG = LogFactory.getLog(TestGridmixRecord.class); private static final Logger LOG = LoggerFactory.getLogger(TestGridmixRecord.class);
static void lengthTest(GridmixRecord x, GridmixRecord y, int min, static void lengthTest(GridmixRecord x, GridmixRecord y, int min,
int max) throws Exception { int max) throws Exception {

View File

@ -22,14 +22,14 @@ import java.util.Random;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.DataOutputBuffer;
public class TestRecordFactory { public class TestRecordFactory {
private static final Log LOG = LogFactory.getLog(TestRecordFactory.class); private static final Logger LOG = LoggerFactory.getLogger(TestRecordFactory.class);
public static void testFactory(long targetBytes, long targetRecs) public static void testFactory(long targetBytes, long targetRecs)
throws Exception { throws Exception {

View File

@ -27,8 +27,8 @@ import java.util.PriorityQueue;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.TreeSet; import java.util.TreeSet;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
public class DeskewedJobTraceReader implements Closeable { public class DeskewedJobTraceReader implements Closeable {
// underlying engine // underlying engine
@ -57,8 +57,8 @@ public class DeskewedJobTraceReader implements Closeable {
private final PriorityQueue<LoggedJob> skewBuffer; private final PriorityQueue<LoggedJob> skewBuffer;
static final private Log LOG = static final private Logger LOG =
LogFactory.getLog(DeskewedJobTraceReader.class); LoggerFactory.getLogger(DeskewedJobTraceReader.class);
static private class JobComparator implements Comparator<LoggedJob>, static private class JobComparator implements Comparator<LoggedJob>,
Serializable { Serializable {

View File

@ -30,8 +30,8 @@ import java.util.Queue;
import java.util.Random; import java.util.Random;
import java.util.Set; import java.util.Set;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured; import org.apache.hadoop.conf.Configured;
@ -52,7 +52,7 @@ public class Folder extends Configured implements Tool {
private int skewBufferLength = 0; private int skewBufferLength = 0;
private long startsAfter = -1; private long startsAfter = -1;
static final private Log LOG = LogFactory.getLog(Folder.class); static final private Logger LOG = LoggerFactory.getLogger(Folder.class);
private DeskewedJobTraceReader reader = null; private DeskewedJobTraceReader reader = null;
private Outputter<LoggedJob> outGen = null; private Outputter<LoggedJob> outGen = null;

View File

@ -36,8 +36,8 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.Tool;
@ -139,7 +139,7 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
private Histogram successfulNthReducerAttempts; private Histogram successfulNthReducerAttempts;
private Histogram mapperLocality; private Histogram mapperLocality;
static final private Log LOG = LogFactory.getLog(HadoopLogsAnalyzer.class); static final private Logger LOG = LoggerFactory.getLogger(HadoopLogsAnalyzer.class);
private int[] attemptTimesPercentiles; private int[] attemptTimesPercentiles;

View File

@ -22,13 +22,13 @@ import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Queue; import java.util.Queue;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent; import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
abstract class HistoryEventEmitter { abstract class HistoryEventEmitter {
static final private Log LOG = LogFactory.getLog(HistoryEventEmitter.class); static final private Logger LOG = LoggerFactory.getLogger(HistoryEventEmitter.class);
abstract List<SingleEventEmitter> nonFinalSEEs(); abstract List<SingleEventEmitter> nonFinalSEEs();

View File

@ -26,8 +26,8 @@ import java.util.StringTokenizer;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapred.TaskStatus; import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.TaskType;
@ -69,7 +69,7 @@ public class JobBuilder {
private static final long BYTES_IN_MEG = private static final long BYTES_IN_MEG =
StringUtils.TraditionalBinaryPrefix.string2long("1m"); StringUtils.TraditionalBinaryPrefix.string2long("1m");
static final private Log LOG = LogFactory.getLog(JobBuilder.class); static final private Logger LOG = LoggerFactory.getLogger(JobBuilder.class);
private String jobID; private String jobID;

View File

@ -25,8 +25,8 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapreduce.JobACL; import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.authorize.AccessControlList;
@ -37,7 +37,7 @@ import org.apache.hadoop.security.authorize.AccessControlList;
*/ */
public class ParsedJob extends LoggedJob { public class ParsedJob extends LoggedJob {
private static final Log LOG = LogFactory.getLog(ParsedJob.class); private static final Logger LOG = LoggerFactory.getLogger(ParsedJob.class);
private Map<String, Long> totalCountersMap = new HashMap<String, Long>(); private Map<String, Long> totalCountersMap = new HashMap<String, Long>();
private Map<String, Long> mapCountersMap = new HashMap<String, Long>(); private Map<String, Long> mapCountersMap = new HashMap<String, Long>();

View File

@ -22,8 +22,8 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapreduce.jobhistory.JhCounters; import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
/** /**
@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
*/ */
public class ParsedTask extends LoggedTask { public class ParsedTask extends LoggedTask {
private static final Log LOG = LogFactory.getLog(ParsedTask.class); private static final Logger LOG = LoggerFactory.getLogger(ParsedTask.class);
private String diagnosticInfo; private String diagnosticInfo;
private String failedDueToAttempt; private String failedDueToAttempt;

View File

@ -21,8 +21,8 @@ package org.apache.hadoop.tools.rumen;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.mapreduce.jobhistory.JhCounters; import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
/** /**
@ -32,7 +32,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
*/ */
public class ParsedTaskAttempt extends LoggedTaskAttempt { public class ParsedTaskAttempt extends LoggedTaskAttempt {
private static final Log LOG = LogFactory.getLog(ParsedTaskAttempt.class); private static final Logger LOG = LoggerFactory.getLogger(ParsedTaskAttempt.class);
private String diagnosticInfo; private String diagnosticInfo;
private String trackerName; private String trackerName;

View File

@ -21,8 +21,8 @@ import java.nio.charset.Charset;
import java.security.MessageDigest; import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException; import java.security.NoSuchAlgorithmException;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
/** /**
* The purpose of this class is to generate new random seeds from a master * The purpose of this class is to generate new random seeds from a master
@ -42,7 +42,7 @@ import org.apache.commons.logging.LogFactory;
* http://www.iro.umontreal.ca/~lecuyer/myftp/streams00/ * http://www.iro.umontreal.ca/~lecuyer/myftp/streams00/
*/ */
public class RandomSeedGenerator { public class RandomSeedGenerator {
private static Log LOG = LogFactory.getLog(RandomSeedGenerator.class); private static Logger LOG = LoggerFactory.getLogger(RandomSeedGenerator.class);
private static final Charset UTF_8 = Charset.forName("UTF-8"); private static final Charset UTF_8 = Charset.forName("UTF-8");
/** MD5 algorithm instance, one for each thread. */ /** MD5 algorithm instance, one for each thread. */

View File

@ -30,8 +30,8 @@ import java.util.Properties;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured; import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@ -49,7 +49,7 @@ import org.apache.hadoop.util.ToolRunner;
* The main driver of the Rumen Parser. * The main driver of the Rumen Parser.
*/ */
public class TraceBuilder extends Configured implements Tool { public class TraceBuilder extends Configured implements Tool {
static final private Log LOG = LogFactory.getLog(TraceBuilder.class); static final private Logger LOG = LoggerFactory.getLogger(TraceBuilder.class);
static final int RUN_METHOD_FAILED_EXIT_CODE = 3; static final int RUN_METHOD_FAILED_EXIT_CODE = 3;
@ -310,6 +310,6 @@ public class TraceBuilder extends Configured implements Tool {
} }
void finish() { void finish() {
IOUtils.cleanup(LOG, traceWriter, topologyWriter); IOUtils.cleanupWithLogger(LOG, traceWriter, topologyWriter);
} }
} }
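
Alongside the logger declaration, TraceBuilder's finish() moves from IOUtils.cleanup to IOUtils.cleanupWithLogger, the org.apache.hadoop.io.IOUtils helper that accepts an slf4j Logger instead of a commons-logging Log. A sketch of the call, using stand-in writer parameters rather than the real TraceBuilder members:

    import java.io.Closeable;
    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class TraceCleanupSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(TraceCleanupSketch.class);

      void finish(Closeable traceWriter, Closeable topologyWriter) {
        // closes each resource, logging rather than rethrowing any IOException
        IOUtils.cleanupWithLogger(LOG, traceWriter, topologyWriter);
      }
    }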

View File

@ -23,8 +23,8 @@ import java.util.Map;
import java.util.Random; import java.util.Random;
import java.util.HashMap; import java.util.HashMap;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TaskStatus.State; import org.apache.hadoop.mapred.TaskStatus.State;
@ -49,7 +49,7 @@ import org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values;
*/ */
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public class ZombieJob implements JobStory { public class ZombieJob implements JobStory {
static final Log LOG = LogFactory.getLog(ZombieJob.class); static final Logger LOG = LoggerFactory.getLogger(ZombieJob.class);
private final LoggedJob job; private final LoggedJob job;
private Map<TaskID, LoggedTask> loggedTaskMap; private Map<TaskID, LoggedTask> loggedTaskMap;
private Map<TaskAttemptID, LoggedTaskAttempt> loggedTaskAttemptMap; private Map<TaskAttemptID, LoggedTaskAttempt> loggedTaskAttemptMap;

View File

@ -18,8 +18,8 @@
package org.apache.hadoop.yarn.sls.resourcemanager; package org.apache.hadoop.yarn.sls.resourcemanager;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
@ -42,7 +42,7 @@ import java.util.Map;
public class MockAMLauncher extends ApplicationMasterLauncher public class MockAMLauncher extends ApplicationMasterLauncher
implements EventHandler<AMLauncherEvent> { implements EventHandler<AMLauncherEvent> {
private static final Log LOG = LogFactory.getLog( private static final Logger LOG = LoggerFactory.getLogger(
MockAMLauncher.class); MockAMLauncher.class);
Map<String, AMSimulator> amMap; Map<String, AMSimulator> amMap;

View File

@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.yarn.sls.synthetic; package org.apache.hadoop.yarn.sls.synthetic;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.commons.math3.random.JDKRandomGenerator; import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobConf;
@ -51,7 +51,7 @@ import static org.apache.hadoop.mapreduce.MRJobConfig.QUEUE_NAME;
public class SynthJob implements JobStory { public class SynthJob implements JobStory {
@SuppressWarnings("StaticVariableName") @SuppressWarnings("StaticVariableName")
private static Log LOG = LogFactory.getLog(SynthJob.class); private static Logger LOG = LoggerFactory.getLogger(SynthJob.class);
private static final long MIN_MEMORY = 1024; private static final long MIN_MEMORY = 1024;
private static final long MIN_VCORES = 1; private static final long MIN_VCORES = 1;

View File

@ -17,8 +17,8 @@
*/ */
package org.apache.hadoop.yarn.sls.synthetic; package org.apache.hadoop.yarn.sls.synthetic;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.commons.math3.distribution.AbstractRealDistribution; import org.apache.commons.math3.distribution.AbstractRealDistribution;
import org.apache.commons.math3.random.JDKRandomGenerator; import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -53,7 +53,7 @@ import static org.codehaus.jackson.map.DeserializationConfig.Feature.FAIL_ON_UNK
public class SynthTraceJobProducer implements JobStoryProducer { public class SynthTraceJobProducer implements JobStoryProducer {
@SuppressWarnings("StaticVariableName") @SuppressWarnings("StaticVariableName")
private static final Log LOG = LogFactory.getLog(SynthTraceJobProducer.class); private static final Logger LOG = LoggerFactory.getLogger(SynthTraceJobProducer.class);
private final Configuration conf; private final Configuration conf;
private final AtomicInteger numJobs; private final AtomicInteger numJobs;

View File

@ -24,7 +24,8 @@ import java.util.Arrays;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Properties; import java.util.Properties;
import org.apache.commons.logging.*; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileUtil;
@ -44,7 +45,7 @@ import org.apache.hadoop.io.Text;
*/ */
public abstract class PipeMapRed { public abstract class PipeMapRed {
protected static final Log LOG = LogFactory.getLog(PipeMapRed.class.getName()); protected static final Logger LOG = LoggerFactory.getLogger(PipeMapRed.class.getName());
/** /**
* Returns the Configuration. * Returns the Configuration.
@ -397,7 +398,7 @@ public abstract class PipeMapRed {
} }
} catch (Throwable th) { } catch (Throwable th) {
outerrThreadsThrowable = th; outerrThreadsThrowable = th;
LOG.warn(th); LOG.warn("{}", th);
} finally { } finally {
try { try {
if (clientIn_ != null) { if (clientIn_ != null) {
@ -405,7 +406,7 @@ public abstract class PipeMapRed {
clientIn_ = null; clientIn_ = null;
} }
} catch (IOException io) { } catch (IOException io) {
LOG.info(io); LOG.info("{}", io);
} }
} }
} }
@ -466,7 +467,7 @@ public abstract class PipeMapRed {
} }
} catch (Throwable th) { } catch (Throwable th) {
outerrThreadsThrowable = th; outerrThreadsThrowable = th;
LOG.warn(th); LOG.warn("{}", th);
try { try {
if (lineReader != null) { if (lineReader != null) {
lineReader.close(); lineReader.close();
@ -476,7 +477,7 @@ public abstract class PipeMapRed {
clientErr_ = null; clientErr_ = null;
} }
} catch (IOException io) { } catch (IOException io) {
LOG.info(io); LOG.info("{}", io);
} }
} }
} }
@ -531,13 +532,13 @@ public abstract class PipeMapRed {
clientOut_.flush(); clientOut_.flush();
clientOut_.close(); clientOut_.close();
} catch (IOException io) { } catch (IOException io) {
LOG.warn(io); LOG.warn("{}", io);
} }
} }
try { try {
waitOutputThreads(); waitOutputThreads();
} catch (IOException io) { } catch (IOException io) {
LOG.warn(io); LOG.warn("{}", io);
} }
if (sim != null) sim.destroy(); if (sim != null) sim.destroy();
LOG.info("mapRedFinished"); LOG.info("mapRedFinished");

View File

@ -30,7 +30,8 @@ import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobConf;
import org.apache.commons.logging.*; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* Shared functionality for hadoopStreaming formats. * Shared functionality for hadoopStreaming formats.
@ -40,7 +41,7 @@ import org.apache.commons.logging.*;
*/ */
public abstract class StreamBaseRecordReader implements RecordReader<Text, Text> { public abstract class StreamBaseRecordReader implements RecordReader<Text, Text> {
protected static final Log LOG = LogFactory.getLog(StreamBaseRecordReader.class.getName()); protected static final Logger LOG = LoggerFactory.getLogger(StreamBaseRecordReader.class.getName());
// custom JobConf properties for this class are prefixed with this namespace // custom JobConf properties for this class are prefixed with this namespace
final static String CONF_NS = "stream.recordreader."; final static String CONF_NS = "stream.recordreader.";

View File

@ -36,8 +36,8 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRConfig;
@ -79,7 +79,7 @@ import static org.apache.hadoop.util.RunJar.MATCH_ANY;
*/ */
public class StreamJob implements Tool { public class StreamJob implements Tool {
protected static final Log LOG = LogFactory.getLog(StreamJob.class.getName()); protected static final Logger LOG = LoggerFactory.getLogger(StreamJob.class.getName());
final static String REDUCE_NONE = "NONE"; final static String REDUCE_NONE = "NONE";
/** -----------Streaming CLI Implementation **/ /** -----------Streaming CLI Implementation **/

View File

@ -20,8 +20,8 @@ package org.apache.hadoop.streaming.mapreduce;
import java.io.IOException; import java.io.IOException;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -40,8 +40,8 @@ import org.apache.hadoop.streaming.StreamUtil;
*/ */
public abstract class StreamBaseRecordReader extends RecordReader<Text, Text> { public abstract class StreamBaseRecordReader extends RecordReader<Text, Text> {
protected static final Log LOG = LogFactory protected static final Logger LOG = LoggerFactory
.getLog(StreamBaseRecordReader.class.getName()); .getLogger(StreamBaseRecordReader.class.getName());
// custom JobConf properties for this class are prefixed with this namespace // custom JobConf properties for this class are prefixed with this namespace
final static String CONF_NS = "stream.recordreader."; final static String CONF_NS = "stream.recordreader.";
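
Both StreamBaseRecordReader variants keep the existing getName() call, which works because LoggerFactory exposes a String-keyed factory method alongside the Class-keyed one, and the two yield the same logger name. A minimal sketch with a hypothetical class:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class RecordReaderLoggerSketch {
      // equivalent loggers: the Class overload uses the fully qualified class name
      static final Logger BY_NAME =
          LoggerFactory.getLogger(RecordReaderLoggerSketch.class.getName());
      static final Logger BY_CLASS =
          LoggerFactory.getLogger(RecordReaderLoggerSketch.class);
    }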

View File

@ -25,8 +25,8 @@ import java.util.Map;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream; import java.util.zip.ZipOutputStream;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -42,7 +42,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
*/ */
public class TestMultipleArchiveFiles extends TestStreaming public class TestMultipleArchiveFiles extends TestStreaming
{ {
private static final Log LOG = LogFactory.getLog(TestMultipleArchiveFiles.class); private static final Logger LOG = LoggerFactory.getLogger(TestMultipleArchiveFiles.class);
private StreamJob job; private StreamJob job;
private String INPUT_DIR = "multiple-archive-files/"; private String INPUT_DIR = "multiple-archive-files/";

View File

@ -23,8 +23,8 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -38,7 +38,7 @@ import org.junit.Test;
*/ */
public class TestStreamXmlMultipleRecords extends TestStreaming public class TestStreamXmlMultipleRecords extends TestStreaming
{ {
private static final Log LOG = LogFactory.getLog( private static final Logger LOG = LoggerFactory.getLogger(
TestStreamXmlMultipleRecords.class); TestStreamXmlMultipleRecords.class);
private boolean hasPerl = false; private boolean hasPerl = false;

View File

@ -31,8 +31,8 @@ import java.util.List;
import java.util.Properties; import java.util.Properties;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.ClusterMapReduceTestCase; import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
@ -51,8 +51,8 @@ import static org.junit.Assert.assertTrue;
public class TestStreamingBadRecords extends ClusterMapReduceTestCase public class TestStreamingBadRecords extends ClusterMapReduceTestCase
{ {
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(TestStreamingBadRecords.class); LoggerFactory.getLogger(TestStreamingBadRecords.class);
private static final List<String> MAPPER_BAD_RECORDS = private static final List<String> MAPPER_BAD_RECORDS =
Arrays.asList("hey022","hey023","hey099"); Arrays.asList("hey022","hey023","hey099");

View File

@ -26,13 +26,13 @@ import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.util.Shell.ShellCommandExecutor;
class UtilTest { class UtilTest {
private static final Log LOG = LogFactory.getLog(UtilTest.class); private static final Logger LOG = LoggerFactory.getLogger(UtilTest.class);
/** /**
* Utility routine to recursively delete a directory.