MAPREDUCE-6998. Moving logging APIs over to slf4j in hadoop-mapreduce-client-jobclient. Contributed by Gergely Novák.

Akira Ajisaka 2017-12-07 16:21:25 +09:00
parent 6cca5b3bcb
commit d4cae977a2
98 changed files with 411 additions and 394 deletions
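
Every file in this change applies the same substitution: the commons-logging Log/LogFactory pair is replaced by the slf4j Logger/LoggerFactory pair. A minimal sketch of the recurring pattern (the class name Example is hypothetical; note that getLog(Example.class.getName()) and getLogger(Example.class) name the logger identically):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {
  // Before: private static final Log LOG = LogFactory.getLog(Example.class);
  private static final Logger LOG = LoggerFactory.getLogger(Example.class);
}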

@@ -23,8 +23,6 @@ import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.mapreduce.JobID;
@@ -35,13 +33,15 @@ import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ClientCache {
private final Configuration conf;
private final ResourceMgrDelegate rm;
private static final Log LOG = LogFactory.getLog(ClientCache.class);
private static final Logger LOG = LoggerFactory.getLogger(ClientCache.class);
private Map<JobID, ClientServiceDelegate> cache =
new HashMap<JobID, ClientServiceDelegate>();

@@ -29,8 +29,6 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.RPC;
@@ -79,11 +77,14 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.client.ClientToAMTokenIdentifier;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
public class ClientServiceDelegate {
private static final Log LOG = LogFactory.getLog(ClientServiceDelegate.class);
private static final Logger LOG =
LoggerFactory.getLogger(ClientServiceDelegate.class);
private static final String UNAVAILABLE = "N/A";
// Caches for per-user NotRunningJobs

@@ -25,8 +25,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -78,11 +76,14 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
public class ResourceMgrDelegate extends YarnClient {
private static final Log LOG = LogFactory.getLog(ResourceMgrDelegate.class);
private static final Logger LOG =
LoggerFactory.getLogger(ResourceMgrDelegate.class);
private YarnConfiguration conf;
private ApplicationSubmissionContext application;

@@ -36,8 +36,6 @@ import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
@@ -99,6 +97,8 @@ import org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.UnitsConversionUtil;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
@@ -108,7 +108,7 @@ import com.google.common.annotations.VisibleForTesting;
@SuppressWarnings("unchecked")
public class YARNRunner implements ClientProtocol {
private static final Log LOG = LogFactory.getLog(YARNRunner.class);
private static final Logger LOG = LoggerFactory.getLogger(YARNRunner.class);
private static final String RACK_GROUP = "rack";
private static final String NODE_IF_RACK_GROUP = "node1";

@@ -19,9 +19,9 @@ package org.apache.hadoop.fi;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is responsible for the decision of when a fault
@@ -42,7 +42,8 @@ import org.apache.hadoop.conf.Configuration;
*/
public class ProbabilityModel {
private static Random generator = new Random();
private static final Log LOG = LogFactory.getLog(ProbabilityModel.class);
private static final Logger LOG =
LoggerFactory.getLogger(ProbabilityModel.class);
static final String FPROB_NAME = "fi.";
private static final String ALL_PROBABILITIES = FPROB_NAME + "*";

@@ -20,10 +20,10 @@ package org.apache.hadoop.fs;
import java.io.IOException;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Reducer that accumulates values based on their type.
@@ -47,7 +47,9 @@ public class AccumulatingReducer extends MapReduceBase
static final String VALUE_TYPE_LONG = "l:";
static final String VALUE_TYPE_FLOAT = "f:";
static final String VALUE_TYPE_STRING = "s:";
private static final Log LOG = LogFactory.getLog(AccumulatingReducer.class);
private static final Logger LOG =
LoggerFactory.getLogger(AccumulatingReducer.class);
protected String hostName;

@@ -28,8 +28,6 @@ import java.io.PrintStream;
import java.util.Date;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
@@ -38,6 +36,8 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Distributed i/o benchmark.
@@ -69,7 +69,7 @@ import org.junit.Test;
@Ignore
public class DFSCIOTest {
// Constants
private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
private static final Logger LOG = LoggerFactory.getLogger(DFSCIOTest.class);
private static final int TEST_TYPE_READ = 0;
private static final int TEST_TYPE_WRITE = 1;
private static final int TEST_TYPE_CLEANUP = 2;

@@ -33,8 +33,6 @@ import java.util.Vector;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
@@ -42,6 +40,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
import org.junit.Ignore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Distributed checkup of the file system consistency.
@@ -56,7 +56,8 @@ import org.junit.Ignore;
@Ignore
public class DistributedFSCheck extends TestCase {
// Constants
private static final Log LOG = LogFactory.getLog(DistributedFSCheck.class);
private static final Logger LOG =
LoggerFactory.getLogger(DistributedFSCheck.class);
private static final int TEST_TYPE_READ = 0;
private static final int TEST_TYPE_CLEANUP = 2;
private static final int DEFAULT_BUFFER_SIZE = 1000000;

@@ -34,8 +34,6 @@ import java.util.Map;
import java.util.StringTokenizer;
import java.util.HashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
@@ -46,6 +44,8 @@ import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Job History Log Analyzer.
@@ -144,7 +144,8 @@ import org.apache.hadoop.util.StringUtils;
*/
@SuppressWarnings("deprecation")
public class JHLogAnalyzer {
private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
private static final Logger LOG =
LoggerFactory.getLogger(JHLogAnalyzer.class);
// Constants
private static final String JHLA_ROOT_DIR =
System.getProperty("test.build.data", "stats/JHLA");

@@ -33,8 +33,6 @@ import java.util.Collection;
import java.util.Date;
import java.util.Random;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -62,6 +60,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Distributed i/o benchmark.
@@ -92,7 +92,7 @@ import org.junit.Test;
*/
public class TestDFSIO implements Tool {
// Constants
private static final Log LOG = LogFactory.getLog(TestDFSIO.class);
private static final Logger LOG = LoggerFactory.getLogger(TestDFSIO.class);
private static final int DEFAULT_BUFFER_SIZE = 1000000;
private static final String BASE_FILE_NAME = "test_io_";
private static final String DEFAULT_RES_FILE_NAME = "TestDFSIO_results.log";

@@ -23,11 +23,11 @@ import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.File;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test Job History Log Analyzer.
@@ -35,7 +35,8 @@ import org.junit.Test;
* @see JHLogAnalyzer
*/
public class TestJHLA {
private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
private static final Logger LOG =
LoggerFactory.getLogger(JHLogAnalyzer.class);
private String historyLog = System.getProperty("test.build.data",
"build/test/data") + "/history/test.log";

@@ -26,8 +26,6 @@ import java.net.UnknownHostException;
import java.util.EnumSet;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.CreateFlag;
@@ -50,6 +48,8 @@ import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** The load generator is a tool for testing NameNode behavior under
* different client loads.
@@ -63,7 +63,7 @@ import org.apache.hadoop.util.ToolRunner;
*
*/
public class LoadGeneratorMR extends LoadGenerator {
public static final Log LOG = LogFactory.getLog(LoadGenerator.class);
public static final Logger LOG = LoggerFactory.getLogger(LoadGenerator.class);
private static int numMapTasks = 1;
private String mrOutDir;

@@ -24,12 +24,12 @@ import java.io.OutputStream;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random file and appends a random amount of bytes
@@ -41,7 +41,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
*/
class AppendOp extends Operation {
private static final Log LOG = LogFactory.getLog(AppendOp.class);
private static final Logger LOG = LoggerFactory.getLogger(AppendOp.class);
AppendOp(ConfigExtractor cfg, Random rnd) {
super(AppendOp.class.getSimpleName(), cfg, rnd);

@@ -22,12 +22,12 @@ import java.text.NumberFormat;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.Constants.OperationType;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Simple access layer onto of a configuration object that extracts the slive
@@ -35,7 +35,8 @@ import org.apache.hadoop.util.StringUtils;
*/
class ConfigExtractor {
private static final Log LOG = LogFactory.getLog(ConfigExtractor.class);
private static final Logger LOG =
LoggerFactory.getLogger(ConfigExtractor.class);
private Configuration config;

@@ -22,13 +22,13 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random file and a random number of bytes to create
@@ -42,7 +42,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
*/
class CreateOp extends Operation {
private static final Log LOG = LogFactory.getLog(CreateOp.class);
private static final Logger LOG = LoggerFactory.getLogger(CreateOp.class);
private static int DEF_IO_BUFFER_SIZE = 4096;

@@ -23,11 +23,11 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random file and attempts to delete that file (if it
@@ -39,7 +39,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
*/
class DeleteOp extends Operation {
private static final Log LOG = LogFactory.getLog(DeleteOp.class);
private static final Logger LOG = LoggerFactory.getLogger(DeleteOp.class);
DeleteOp(ConfigExtractor cfg, Random rnd) {
super(DeleteOp.class.getSimpleName(), cfg, rnd);

@@ -23,12 +23,12 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random directory and attempts to list that
@@ -41,7 +41,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
*/
class ListOp extends Operation {
private static final Log LOG = LogFactory.getLog(ListOp.class);
private static final Logger LOG = LoggerFactory.getLogger(ListOp.class);
ListOp(ConfigExtractor cfg, Random rnd) {
super(ListOp.class.getSimpleName(), cfg, rnd);

@@ -23,11 +23,11 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random directory and attempts to create that
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
*/
class MkdirOp extends Operation {
private static final Log LOG = LogFactory.getLog(MkdirOp.class);
private static final Logger LOG = LoggerFactory.getLogger(MkdirOp.class);
MkdirOp(ConfigExtractor cfg, Random rnd) {
super(MkdirOp.class.getSimpleName(), cfg, rnd);

@@ -24,12 +24,12 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.DataVerifier.VerifyOutput;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random file and selects a random read size (from
@@ -43,7 +43,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
* number of failures and the amount of time taken to fail
*/
class ReadOp extends Operation {
private static final Log LOG = LogFactory.getLog(ReadOp.class);
private static final Logger LOG = LoggerFactory.getLogger(ReadOp.class);
ReadOp(ConfigExtractor cfg, Random rnd) {
super(ReadOp.class.getSimpleName(), cfg, rnd);

@@ -23,11 +23,11 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random file and a second random file and attempts
@@ -60,7 +60,7 @@ class RenameOp extends Operation {
}
}
private static final Log LOG = LogFactory.getLog(RenameOp.class);
private static final Logger LOG = LoggerFactory.getLogger(RenameOp.class);
RenameOp(ConfigExtractor cfg, Random rnd) {
super(RenameOp.class.getSimpleName(), cfg, rnd);

@@ -24,8 +24,8 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Class which provides a report for the given operation output
@@ -48,7 +48,7 @@ class ReportWriter {
static final String NOT_FOUND = "files_not_found";
static final String BAD_FILES = "bad_files";
private static final Log LOG = LogFactory.getLog(ReportWriter.class);
private static final Logger LOG = LoggerFactory.getLogger(ReportWriter.class);
private static final String SECTION_DELIM = "-------------";

@@ -21,10 +21,10 @@ package org.apache.hadoop.fs.slive;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which sleeps for a given number of milliseconds according to the
@@ -32,7 +32,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
*/
class SleepOp extends Operation {
private static final Log LOG = LogFactory.getLog(SleepOp.class);
private static final Logger LOG = LoggerFactory.getLogger(SleepOp.class);
SleepOp(ConfigExtractor cfg, Random rnd) {
super(SleepOp.class.getSimpleName(), cfg, rnd);

@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.apache.hadoop.io.Text;
@@ -35,6 +33,8 @@ import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The slive class which sets up the mapper to be used which itself will receive
@@ -45,7 +45,7 @@ import org.apache.hadoop.util.StringUtils;
public class SliveMapper extends MapReduceBase implements
Mapper<Object, Object, Text, Text> {
private static final Log LOG = LogFactory.getLog(SliveMapper.class);
private static final Logger LOG = LoggerFactory.getLogger(SliveMapper.class);
private static final String OP_TYPE = SliveMapper.class.getSimpleName();

@@ -21,8 +21,6 @@ package org.apache.hadoop.fs.slive;
import java.io.IOException;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
@@ -30,6 +28,8 @@ import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The slive reducer which iterates over the given input values and merges them
@@ -38,7 +38,7 @@ import org.apache.hadoop.util.StringUtils;
public class SliveReducer extends MapReduceBase implements
Reducer<Text, Text, Text, Text> {
private static final Log LOG = LogFactory.getLog(SliveReducer.class);
private static final Logger LOG = LoggerFactory.getLogger(SliveReducer.class);
private ConfigExtractor config;

@@ -30,8 +30,6 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +43,8 @@ import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Slive test entry point + main program
@@ -61,7 +61,7 @@ import org.apache.hadoop.util.ToolRunner;
*/
public class SliveTest implements Tool {
private static final Log LOG = LogFactory.getLog(SliveTest.class);
private static final Logger LOG = LoggerFactory.getLogger(SliveTest.class);
// ensures the hdfs configurations are loaded if they exist
static {

@@ -31,8 +31,6 @@ import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -43,13 +41,15 @@ import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Junit 4 test for slive
*/
public class TestSlive {
private static final Log LOG = LogFactory.getLog(TestSlive.class);
private static final Logger LOG = LoggerFactory.getLogger(TestSlive.class);
private static final Random rnd = new Random(1L);
@@ -258,13 +258,13 @@ public class TestSlive {
DataWriter writer = new DataWriter(rnd);
FileOutputStream fs = new FileOutputStream(fn);
GenerateOutput ostat = writer.writeSegment(byteAm, fs);
LOG.info(ostat);
LOG.info(ostat.toString());
fs.close();
assertTrue(ostat.getBytesWritten() == byteAm);
DataVerifier vf = new DataVerifier();
FileInputStream fin = new FileInputStream(fn);
VerifyOutput vfout = vf.verifyFile(byteAm, new DataInputStream(fin));
LOG.info(vfout);
LOG.info(vfout.toString());
fin.close();
assertEquals(vfout.getBytesRead(), byteAm);
assertTrue(vfout.getChunksDifferent() == 0);
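
The toString() calls added in this hunk are forced by the API change: slf4j's logging methods take a String message, while commons-logging's took any Object. An alternative available in slf4j (not used by this commit) is parameterized logging, which defers rendering until the level is enabled:

// Equivalent effect without an explicit toString():
LOG.info("{}", ostat);
LOG.info("{}", vfout);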

@@ -22,12 +22,12 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Operation which selects a random file and truncates a random amount of bytes
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
*/
class TruncateOp extends Operation {
private static final Log LOG = LogFactory.getLog(TruncateOp.class);
private static final Logger LOG = LoggerFactory.getLogger(TruncateOp.class);
TruncateOp(ConfigExtractor cfg, Random rnd) {
super(TruncateOp.class.getSimpleName(), cfg, rnd);

@@ -26,12 +26,12 @@ import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.slive.Constants.Distribution;
import org.apache.hadoop.fs.slive.Constants.OperationType;
import org.apache.hadoop.fs.slive.Weights.UniformWeight;
import org.apache.hadoop.fs.slive.ObserveableOp.Observer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is the main handler that selects operations to run using the
@@ -47,7 +47,8 @@ class WeightSelector {
Double weight(int elapsed, int duration);
}
private static final Log LOG = LogFactory.getLog(WeightSelector.class);
private static final Logger LOG =
LoggerFactory.getLogger(WeightSelector.class);
private static class OperationInfo {
Integer amountLeft;

@@ -30,8 +30,6 @@ import java.util.Date;
import java.util.Iterator;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
@@ -57,6 +55,8 @@ import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This program executes a specified operation that applies load to
@@ -78,8 +78,7 @@ import org.apache.hadoop.util.ToolRunner;
*/
public class NNBench extends Configured implements Tool {
private static final Log LOG = LogFactory.getLog(
"org.apache.hadoop.hdfs.NNBench");
private static final Logger LOG = LoggerFactory.getLogger(NNBench.class);
private static String CONTROL_DIR_NAME = "control";
private static String OUTPUT_DIR_NAME = "output";

@@ -21,8 +21,6 @@ package org.apache.hadoop.hdfs;
import java.io.IOException;
import java.util.Date;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -30,6 +28,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.mapred.JobConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This program executes a specified operation that applies load to
@@ -45,8 +45,8 @@ import org.apache.hadoop.mapred.JobConf;
*/
public class NNBenchWithoutMR {
private static final Log LOG = LogFactory.getLog(
"org.apache.hadoop.hdfs.NNBench");
private static final Logger LOG =
LoggerFactory.getLogger(NNBenchWithoutMR.class);
// variable initialzed from command line arguments
private static long startTime = 0;
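
A subtle behavioral change in this pair of files: both benchmarks previously logged under the shared category "org.apache.hadoop.hdfs.NNBench", whereas each class now logs under its own name, so log configuration keyed to the old shared category no longer covers NNBenchWithoutMR. Looking up a logger by class is shorthand for looking it up by the class's fully qualified name:

// These two lookups return the same logger:
Logger byClass = LoggerFactory.getLogger(NNBenchWithoutMR.class);
Logger byName = LoggerFactory.getLogger("org.apache.hadoop.hdfs.NNBenchWithoutMR");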

@@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.Date;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -37,10 +35,12 @@ import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class BigMapOutput extends Configured implements Tool {
public static final Log LOG =
LogFactory.getLog(BigMapOutput.class.getName());
public static final Logger LOG =
LoggerFactory.getLogger(BigMapOutput.class);
private static Random random = new Random();
public static String MIN_KEY = "mapreduce.bmo.minkey";
public static String MIN_VALUE = "mapreduce.bmo.minvalue";

View File

@ -24,8 +24,6 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -33,13 +31,15 @@ import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Runs a job multiple times and takes average of all runs.
*/
public class MRBench extends Configured implements Tool{
private static final Log LOG = LogFactory.getLog(MRBench.class);
private static final Logger LOG = LoggerFactory.getLogger(MRBench.class);
private static final String DEFAULT_INPUT_SUB = "mr_input";
private static final String DEFAULT_OUTPUT_SUB = "mr_output";

@@ -20,13 +20,13 @@ package org.apache.hadoop.mapred;
import java.io.IOException;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is an MR2 replacement for older MR1 MiniMRCluster, that was used
@@ -45,7 +45,8 @@ import org.apache.hadoop.security.UserGroupInformation;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MiniMRCluster {
private static final Log LOG = LogFactory.getLog(MiniMRCluster.class);
private static final Logger LOG =
LoggerFactory.getLogger(MiniMRCluster.class);
private MiniMRClientCluster mrClientCluster;
@@ -98,7 +99,7 @@ public class MiniMRCluster {
try {
jobConf = new JobConf(mrClientCluster.getConfig());
} catch (IOException e) {
LOG.error(e);
LOG.error(e.getMessage());
}
return jobConf;
}
@@ -108,7 +109,7 @@
try {
jobConf = new JobConf(mrClientCluster.getConfig());
} catch (IOException e) {
LOG.error(e);
LOG.error(e.getMessage());
}
return jobConf;
}
@@ -224,7 +225,7 @@
try {
jobConf = new JobConf(mrClientCluster.getConfig());
} catch (IOException e) {
LOG.error(e);
LOG.error(e.getMessage());
}
return jobConf;
}
@@ -266,7 +267,7 @@
try {
mrClientCluster.stop();
} catch (IOException e) {
LOG.error(e);
LOG.error(e.getMessage());
}
}
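
slf4j has no error(Object) overload, which is why these catch blocks now log e.getMessage(); note that this drops the stack trace. A sketch of an alternative that preserves it (the message text here is hypothetical):

} catch (IOException e) {
  // error(String, Throwable) appends the full stack trace to the log:
  LOG.error("Failed to obtain the job configuration", e);
}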

@@ -18,13 +18,13 @@
package org.apache.hadoop.mapred;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.service.Service.STATE;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An adapter for MiniMRYarnCluster providing a MiniMRClientCluster interface.
@@ -34,7 +34,8 @@ public class MiniMRYarnClusterAdapter implements MiniMRClientCluster {
private MiniMRYarnCluster miniMRYarnCluster;
private static final Log LOG = LogFactory.getLog(MiniMRYarnClusterAdapter.class);
private static final Logger LOG =
LoggerFactory.getLogger(MiniMRYarnClusterAdapter.class);
public MiniMRYarnClusterAdapter(MiniMRYarnCluster miniMRYarnCluster) {
this.miniMRYarnCluster = miniMRYarnCluster;

@@ -29,8 +29,6 @@ import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
@@ -41,6 +39,8 @@ import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class tests reliability of the framework in the face of failures of
@@ -73,7 +73,8 @@ import org.apache.hadoop.util.ToolRunner;
public class ReliabilityTest extends Configured implements Tool {
private String dir;
private static final Log LOG = LogFactory.getLog(ReliabilityTest.class);
private static final Logger LOG =
LoggerFactory.getLogger(ReliabilityTest.class);
private void displayUsage() {
LOG.info("This must be run in only the distributed mode " +
@@ -207,7 +208,7 @@ public class ReliabilityTest extends Configured implements Tool {
args);
checkJobExitStatus(status, jobClass);
} catch (Exception e) {
LOG.fatal("JOB " + jobClass + " failed to run");
LOG.error("JOB " + jobClass + " failed to run");
System.exit(-1);
}
}
@@ -325,7 +326,7 @@
killed = true;
return;
} catch (Exception e) {
LOG.fatal(StringUtils.stringifyException(e));
LOG.error(StringUtils.stringifyException(e));
}
}
}
@@ -495,7 +496,7 @@
} catch (InterruptedException ie) {
killed = true;
} catch (Exception e) {
LOG.fatal(StringUtils.stringifyException(e));
LOG.error(StringUtils.stringifyException(e));
}
}
}
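
The fatal-to-error downgrades in this file reflect that slf4j defines no FATAL level. Where the distinction matters, an slf4j Marker is the usual substitute; a sketch only, not something this commit does:

import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
// LOG.error(FATAL, "JOB " + jobClass + " failed to run");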

@@ -30,8 +30,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -48,8 +48,8 @@ import static org.junit.Assert.assertNotNull;
@Ignore
public class TestBadRecords extends ClusterMapReduceTestCase {
private static final Log LOG =
LogFactory.getLog(TestBadRecords.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestBadRecords.class);
private static final List<String> MAPPER_BAD_RECORDS =
Arrays.asList("hello01","hello04","hello05");

@@ -24,8 +24,6 @@ import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.mapreduce.Cluster;
@@ -144,6 +142,8 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestClientRedirect {
@@ -151,7 +151,8 @@ public class TestClientRedirect {
DefaultMetricsSystem.setMiniClusterMode(true);
}
private static final Log LOG = LogFactory.getLog(TestClientRedirect.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestClientRedirect.class);
private static final String RMADDRESS = "0.0.0.0:8054";
private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);

@@ -26,16 +26,15 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapred.lib.CombineFileRecordReader;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class TestCombineFileInputFormat {
private static final Log LOG =
LogFactory.getLog(TestCombineFileInputFormat.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestCombineFileInputFormat.class);
private static JobConf defaultConf = new JobConf();
private static FileSystem localFs = null;

@@ -25,8 +25,6 @@ import java.io.IOException;
import java.util.BitSet;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -36,10 +34,12 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestCombineSequenceFileInputFormat {
private static final Log LOG =
LogFactory.getLog(TestCombineSequenceFileInputFormat.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestCombineSequenceFileInputFormat.class);
private static Configuration conf = new Configuration();
private static FileSystem localFs = null;

@@ -31,8 +31,6 @@ import java.util.BitSet;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
@@ -43,10 +41,12 @@ import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapred.lib.CombineTextInputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestCombineTextInputFormat {
private static final Log LOG =
LogFactory.getLog(TestCombineTextInputFormat.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestCombineTextInputFormat.class);
private static JobConf defaultConf = new JobConf();
private static FileSystem localFs = null;

@@ -30,8 +30,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.zip.Inflater;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
@@ -46,10 +44,13 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Ignore
public class TestConcatenatedCompressedInput {
private static final Log LOG =
LogFactory.getLog(TestConcatenatedCompressedInput.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestConcatenatedCompressedInput.class);
private static int MAX_LENGTH = 10000;
private static JobConf defaultConf = new JobConf();
private static FileSystem localFs = null;

@@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -35,14 +33,18 @@ import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
public class TestFixedLengthInputFormat {
private static Log LOG;
private static final Logger LOG =
LoggerFactory.getLogger(TestFixedLengthInputFormat.class);
private static Configuration defaultConf;
private static FileSystem localFs;
private static Path workDir;
@@ -55,7 +57,6 @@ public class TestFixedLengthInputFormat {
@BeforeClass
public static void onlyOnce() {
try {
LOG = LogFactory.getLog(TestFixedLengthInputFormat.class.getName());
defaultConf = new Configuration();
defaultConf.set("fs.defaultFS", "file:///");
localFs = FileSystem.getLocal(defaultConf);

@@ -22,9 +22,6 @@ import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
@@ -36,6 +33,9 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import static org.junit.Assert.*;
/**
@@ -54,7 +54,8 @@ public class TestJobCleanup {
private static Path emptyInDir = null;
private static int outDirs = 0;
private static Log LOG = LogFactory.getLog(TestJobCleanup.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestJobCleanup.class);
@BeforeClass
public static void setUp() throws IOException {

@@ -21,8 +21,6 @@ package org.apache.hadoop.mapred;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,13 +32,15 @@ import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A JUnit test to test Job System Directory with Mini-DFS.
*/
public class TestJobSysDirWithDFS {
private static final Log LOG =
LogFactory.getLog(TestJobSysDirWithDFS.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestJobSysDirWithDFS.class);
static final int NUM_MAPS = 10;
static final int NUM_SAMPLES = 100000;

@@ -21,19 +21,21 @@ package org.apache.hadoop.mapred;
import java.io.*;
import java.util.*;
import org.apache.commons.logging.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class TestKeyValueTextInputFormat {
private static final Log LOG =
LogFactory.getLog(TestKeyValueTextInputFormat.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestKeyValueTextInputFormat.class);
private static int MAX_LENGTH = 10000;

@@ -31,8 +31,6 @@ import java.util.List;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -59,14 +57,16 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
public class TestMRTimelineEventHandling {
private static final String TIMELINE_AUX_SERVICE_NAME = "timeline_collector";
private static final Log LOG =
LogFactory.getLog(TestMRTimelineEventHandling.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMRTimelineEventHandling.class);
@Test
public void testTimelineServiceStartInMiniCluster() throws Exception {

@@ -22,8 +22,6 @@ import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.ipc.ProtocolSignature;
@@ -39,6 +37,9 @@ import org.apache.hadoop.mapreduce.split.JobSplitWriter;
import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertTrue;
/**
@@ -56,7 +57,8 @@ import static org.junit.Assert.assertTrue;
* validated here.
*/
public class TestMapProgress {
public static final Log LOG = LogFactory.getLog(TestMapProgress.class);
public static final Logger LOG =
LoggerFactory.getLogger(TestMapProgress.class);
private static String TEST_ROOT_DIR;
static {
String root = new File(System.getProperty("test.build.data", "/tmp"))

@@ -27,8 +27,6 @@ import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -46,6 +44,8 @@ import org.apache.hadoop.util.Shell;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Class to test mapred task's
@@ -53,8 +53,8 @@ import org.junit.Test;
* - child env
*/
public class TestMiniMRChildTask {
private static final Log LOG =
LogFactory.getLog(TestMiniMRChildTask.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestMiniMRChildTask.class);
private final static String OLD_CONFIGS = "test.old.configs";
private final static String TASK_OPTS_VAL = "-Xmx200m";

@@ -21,13 +21,14 @@ import java.io.IOException;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -35,7 +36,8 @@ public class TestMultiFileInputFormat {
private static JobConf job = new JobConf();
private static final Log LOG = LogFactory.getLog(TestMultiFileInputFormat.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMultiFileInputFormat.class);
private static final int MAX_SPLIT_COUNT = 10000;
private static final int SPLIT_COUNT_INCR = 6000;

@@ -18,8 +18,8 @@
package org.apache.hadoop.mapred;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BooleanWritable;
@@ -31,17 +31,16 @@ import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.junit.Test;
import java.io.IOException;
import java.util.Random;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestSequenceFileAsBinaryOutputFormat {
private static final Log LOG =
LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestSequenceFileAsBinaryOutputFormat.class);
private static final int RECORDS = 10000;
// A random task attempt id for testing.
private static final String attempt = "attempt_200707121733_0001_m_000000_0";

@@ -17,18 +17,18 @@
*/
package org.apache.hadoop.mapred;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.SortedRanges.Range;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Iterator;
import static org.junit.Assert.assertEquals;
public class TestSortedRanges {
private static final Log LOG =
LogFactory.getLog(TestSortedRanges.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestSortedRanges.class);
@Test
public void testAdd() {
@@ -56,7 +56,7 @@ public class TestSortedRanges {
sr.add(new Range(21,50));
assertEquals(70, sr.getIndicesCount());
LOG.debug(sr);
LOG.debug(sr.toString());
Iterator<Long> it = sr.skipRangeIterator();
int i = 0;
@@ -96,7 +96,7 @@
sr.remove(new SortedRanges.Range(5,1));
assertEquals(3, sr.getIndicesCount());
LOG.debug(sr);
LOG.debug(sr.toString());
}
}

@@ -18,8 +18,9 @@
package org.apache.hadoop.mapred;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -30,10 +31,8 @@ import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.Progressable;
import org.junit.Test;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.URI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -42,8 +41,8 @@ import static org.junit.Assert.fail;
* A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
*/
public class TestSpecialCharactersInOutputPath {
private static final Log LOG =
LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestSpecialCharactersInOutputPath.class);
private static final String OUTPUT_FILENAME = "result[0]";

@@ -17,13 +17,11 @@
*/
package org.apache.hadoop.mapred;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestTaskStatus {
private static final Log LOG = LogFactory.getLog(TestTaskStatus.class);
@Test
public void testMapTaskStatusStartAndFinishTimes() {

@@ -38,16 +38,15 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class TestTextInputFormat {
private static final Log LOG =
LogFactory.getLog(TestTextInputFormat.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestTextInputFormat.class);
private static int MAX_LENGTH = 10000;

@@ -48,8 +48,6 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -117,7 +115,6 @@ import org.apache.log4j.Appender;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Layout;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.SimpleLayout;
import org.apache.log4j.WriterAppender;
import org.apache.log4j.spi.LoggingEvent;
@@ -128,6 +125,8 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableList;
@@ -136,7 +135,8 @@ import com.google.common.collect.ImmutableList;
* fine
*/
public class TestYARNRunner {
private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestYARNRunner.class);
private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
// prefix before <LOG_DIR>/profile.out
@@ -574,7 +574,8 @@ }
}
@Test(timeout=20000)
public void testWarnCommandOpts() throws Exception {
Logger logger = Logger.getLogger(YARNRunner.class);
org.apache.log4j.Logger logger =
org.apache.log4j.Logger.getLogger(YARNRunner.class);
ByteArrayOutputStream bout = new ByteArrayOutputStream();
Layout layout = new SimpleLayout();
@@ -1006,7 +1007,8 @@ MRJobConfig.RESOURCE_TYPE_NAME_MEMORY,
MRJobConfig.RESOURCE_TYPE_NAME_MEMORY,
MRJobConfig.RESOURCE_TYPE_ALTERNATIVE_NAME_MEMORY)) {
TestAppender testAppender = new TestAppender();
Logger logger = Logger.getLogger(YARNRunner.class);
org.apache.log4j.Logger logger =
org.apache.log4j.Logger.getLogger(YARNRunner.class);
logger.addAppender(testAppender);
try {
JobConf jobConf = new JobConf();
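
These two tests keep log4j deliberately: they attach an Appender to capture logger output, and that requires the concrete log4j Logger type. Since org.slf4j.Logger now owns the plain Logger import, the log4j class has to be written out fully qualified. The capture idiom the tests rely on, in sketch form:

org.apache.log4j.Logger logger =
    org.apache.log4j.Logger.getLogger(YARNRunner.class);
ByteArrayOutputStream bout = new ByteArrayOutputStream();
logger.addAppender(new WriterAppender(new SimpleLayout(), bout));
// ... run the code under test, then inspect bout.toString() ...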

@@ -22,8 +22,6 @@ import java.io.IOException;
import java.io.File;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -36,6 +34,8 @@ import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Distributed threaded map benchmark.
@@ -52,7 +52,8 @@ import org.apache.hadoop.util.ToolRunner;
public class ThreadedMapBenchmark extends Configured implements Tool {
private static final Log LOG = LogFactory.getLog(ThreadedMapBenchmark.class);
private static final Logger LOG =
LoggerFactory.getLogger(ThreadedMapBenchmark.class);
private static Path BASE_DIR =
new Path(System.getProperty("test.build.data",
File.separator + "benchmarks" + File.separator

@@ -34,8 +34,6 @@ import java.util.Properties;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeoutException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -60,6 +58,8 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Supplier;
@@ -69,7 +69,7 @@ import com.google.common.base.Supplier;
*/
public class UtilsForTests {
static final Log LOG = LogFactory.getLog(UtilsForTests.class);
static final Logger LOG = LoggerFactory.getLogger(UtilsForTests.class);
final static long KB = 1024L * 1;
final static long MB = 1024L * KB;

@@ -21,13 +21,14 @@ package org.apache.hadoop.mapred.jobcontrol;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobConf;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
/**
@@ -35,8 +36,8 @@ import static org.junit.Assert.assertEquals;
*/
public class TestLocalJobControl extends HadoopTestCase {
public static final Log LOG = LogFactory.getLog(TestLocalJobControl.class
.getName());
public static final Logger LOG =
LoggerFactory.getLogger(TestLocalJobControl.class);
/**
* Initialises a new instance of this test case to use a Local MR cluster and

@@ -23,8 +23,6 @@ import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileUtil;
@ -45,14 +43,16 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
@Ignore
public class TestPipes {
private static final Log LOG =
LogFactory.getLog(TestPipes.class.getName());
private static final Logger LOG = LoggerFactory.getLogger(TestPipes.class);
private static Path cppExamples =
new Path(System.getProperty("install.c++.examples"));

View File

@ -17,20 +17,20 @@
*/
package org.apache.hadoop.mapreduce;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.util.ToolRunner;
import java.io.IOException;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A sleep job whose mappers create a 1MB buffer for every record.
*/
public class GrowingSleepJob extends SleepJob {
private static final Log LOG = LogFactory.getLog(GrowingSleepJob.class);
private static final Logger LOG =
LoggerFactory.getLogger(GrowingSleepJob.class);
public static class GrowingSleepMapper extends SleepMapper {
private final int MB = 1024 * 1024;

View File

@ -20,19 +20,20 @@ package org.apache.hadoop.mapreduce;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Used to parse job history and configuration files.
*/
class JobHistoryFileParser {
private static final Log LOG = LogFactory.getLog(JobHistoryFileParser.class);
private static final Logger LOG =
LoggerFactory.getLogger(JobHistoryFileParser.class);
private final FileSystem fs;

View File

@ -24,19 +24,18 @@ import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class JobHistoryFileReplayHelper {
private static final Log LOG =
LogFactory.getLog(JobHistoryFileReplayHelper.class);
private static final Logger LOG =
LoggerFactory.getLogger(JobHistoryFileReplayHelper.class);
static final String PROCESSING_PATH = "processing path";
static final String REPLAY_MODE = "replay mode";
static final int WRITE_ALL_AT_ONCE = 1;

View File

@ -23,8 +23,6 @@ import java.util.Collection;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
@ -38,7 +36,8 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Mapper for TimelineServicePerformanceV1 that replays job history files to the
@ -48,8 +47,8 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
class JobHistoryFileReplayMapperV1 extends
org.apache.hadoop.mapreduce.
Mapper<IntWritable,IntWritable,Writable,Writable> {
private static final Log LOG =
LogFactory.getLog(JobHistoryFileReplayMapperV1.class);
private static final Logger LOG =
LoggerFactory.getLogger(JobHistoryFileReplayMapperV1.class);
public void map(IntWritable key, IntWritable val, Context context) throws IOException {
// collect the apps it needs to process

View File

@ -23,8 +23,6 @@ import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles;
@ -38,6 +36,8 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Mapper for TimelineServicePerformance that replays job history files to the
@ -45,8 +45,8 @@ import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector
*
*/
class JobHistoryFileReplayMapperV2 extends EntityWriterV2 {
private static final Log LOG =
LogFactory.getLog(JobHistoryFileReplayMapperV2.class);
private static final Logger LOG =
LoggerFactory.getLogger(JobHistoryFileReplayMapperV2.class);
@Override
protected void writeEntities(Configuration tlConf,

View File

@ -31,8 +31,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@ -46,22 +44,19 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.TaskLog;
import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapred.TaskLog.LogName;
import org.apache.hadoop.mapred.TaskLog.Reader;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility methods used in various Job Control unit tests.
*/
public class MapReduceTestUtil {
public static final Log LOG =
LogFactory.getLog(MapReduceTestUtil.class.getName());
public static final Logger LOG =
LoggerFactory.getLogger(MapReduceTestUtil.class);
static private Random rand = new Random();

View File

@ -34,8 +34,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
@ -46,6 +44,8 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.eclipse.jetty.util.ajax.JSON;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class drives the creation of a mini-cluster on the local machine. By
@ -60,8 +60,8 @@ import org.eclipse.jetty.util.ajax.JSON;
* To shutdown the cluster, kill the process.
*/
public class MiniHadoopClusterManager {
private static final Log LOG = LogFactory
.getLog(MiniHadoopClusterManager.class);
private static final Logger LOG =
LoggerFactory.getLogger(MiniHadoopClusterManager.class);
private MiniMRClientCluster mr;
private MiniDFSCluster dfs;

View File

@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
@ -32,6 +30,8 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Adds simple entities with random string payload, events, metrics, and
@ -41,7 +41,8 @@ class SimpleEntityWriterV1
extends org.apache.hadoop.mapreduce.Mapper
<IntWritable, IntWritable, Writable, Writable>
implements SimpleEntityWriterConstants {
private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV1.class);
private static final Logger LOG =
LoggerFactory.getLogger(SimpleEntityWriterV1.class);
public void map(IntWritable key, IntWritable val, Context context)
throws IOException {

View File

@ -22,8 +22,6 @@ import java.io.IOException;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.TimelineServicePerformance.PerfCounters;
import org.apache.hadoop.security.UserGroupInformation;
@ -35,6 +33,8 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Adds simple entities with random string payload, events, metrics, and
@ -42,7 +42,8 @@ import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector
*/
class SimpleEntityWriterV2 extends EntityWriterV2
implements SimpleEntityWriterConstants {
private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV2.class);
private static final Logger LOG =
LoggerFactory.getLogger(SimpleEntityWriterV2.class);
protected void writeEntities(Configuration tlConf,
TimelineCollectorManager manager, Context context) throws IOException {

View File

@ -19,20 +19,19 @@ package org.apache.hadoop.mapreduce;
import java.util.Random;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.counters.LimitExceededException;
import org.apache.hadoop.mapreduce.counters.Limits;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* TestCounters checks the sanity and recoverability of {@code Counters}
*/
public class TestCounters {
static final Log LOG = LogFactory.getLog(TestCounters.class);
static final Logger LOG = LoggerFactory.getLogger(TestCounters.class);
/**
* Verify counter value works

View File

@ -17,8 +17,6 @@
*/
package org.apache.hadoop.mapreduce;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
@ -33,6 +31,8 @@ import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@ -54,7 +54,8 @@ import static org.junit.Assert.assertTrue;
*/
public class TestLocalRunner {
private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestLocalRunner.class);
private static int INPUT_SIZES[] =
new int[] { 50000, 500, 500, 20, 5000, 500};
@ -330,9 +331,9 @@ public class TestLocalRunner {
try {
job.waitForCompletion(true);
} catch (InterruptedException ie) {
LOG.fatal("Interrupted while waiting for job completion", ie);
LOG.error("Interrupted while waiting for job completion", ie);
for (int i = 0; i < 10; i++) {
LOG.fatal("Dumping stacks");
LOG.error("Dumping stacks");
ReflectionUtils.logThreadInfo(LOG, "multimap threads", 0);
Thread.sleep(1000);
}

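The fatal-to-error rewrite in this hunk is forced by the API: slf4j's levels stop at ERROR, so commons-logging LOG.fatal call sites have no direct equivalent. A minimal sketch of the mapping, with a hypothetical class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class FatalMapping {
  private static final Logger LOG = LoggerFactory.getLogger(FatalMapping.class);

  public static void main(String[] args) {
    Exception ie = new InterruptedException("simulated");
    // Was LOG.fatal(...) under commons-logging; slf4j keeps the throwable
    // but can only express the severity as ERROR.
    LOG.error("Interrupted while waiting for job completion", ie);
  }
}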
View File

@ -17,8 +17,6 @@
*/
package org.apache.hadoop.mapreduce;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
@ -33,6 +31,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
@ -60,7 +60,8 @@ import static org.junit.Assert.fail;
*/
public class TestMRJobClient extends ClusterMapReduceTestCase {
private static final Log LOG = LogFactory.getLog(TestMRJobClient.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMRJobClient.class);
private Job runJob(Configuration conf) throws Exception {
String input = "hello1\nhello2\nhello3\n";

View File

@ -24,22 +24,21 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestMapCollection {
private static final Log LOG = LogFactory.getLog(
private static final Logger LOG = LoggerFactory.getLogger(
TestMapCollection.class.getName());
public static abstract class FillWritable implements Writable, Configurable {

View File

@ -27,8 +27,6 @@ import java.io.Writer;
import java.util.ArrayList;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@ -42,6 +40,8 @@ import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertTrue;
@ -66,8 +66,8 @@ public class TestValueIterReset {
}
}
private static final Log LOG =
LogFactory.getLog(TestValueIterReset.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestValueIterReset.class);
public static class TestMapper
extends Mapper<LongWritable, Text, IntWritable, IntWritable> {

View File

@ -22,17 +22,17 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class TimelineEntityConverterV1 {
private static final Log LOG =
LogFactory.getLog(TimelineEntityConverterV1.class);
private static final Logger LOG =
LoggerFactory.getLogger(TimelineEntityConverterV1.class);
static final String JOB = "MAPREDUCE_JOB";
static final String TASK = "MAPREDUCE_TASK";

View File

@ -24,8 +24,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
@ -34,8 +34,8 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
class TimelineEntityConverterV2 {
private static final Log LOG =
LogFactory.getLog(TimelineEntityConverterV2.class);
private static final Logger LOG =
LoggerFactory.getLogger(TimelineEntityConverterV2.class);
static final String JOB = "MAPREDUCE_JOB";
static final String TASK = "MAPREDUCE_TASK";

View File

@ -18,8 +18,6 @@
package org.apache.hadoop.mapreduce.lib.db;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -36,6 +34,8 @@ import org.hsqldb.server.Server;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.DataInput;
import java.io.DataOutput;
@ -58,7 +58,7 @@ import static org.junit.Assert.assertTrue;
*/
public class TestDataDrivenDBInputFormat extends HadoopTestCase {
private static final Log LOG = LogFactory.getLog(
private static final Logger LOG = LoggerFactory.getLogger(
TestDataDrivenDBInputFormat.class);
private static final String DB_NAME = "dddbif";

View File

@ -27,8 +27,6 @@ import java.util.BitSet;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -44,10 +42,12 @@ import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestCombineSequenceFileInputFormat {
private static final Log LOG =
LogFactory.getLog(TestCombineSequenceFileInputFormat.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestCombineSequenceFileInputFormat.class);
private static Configuration conf = new Configuration();
private static FileSystem localFs = null;

View File

@ -32,8 +32,6 @@ import java.util.BitSet;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -51,10 +49,12 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestCombineTextInputFormat {
private static final Log LOG =
LogFactory.getLog(TestCombineTextInputFormat.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestCombineTextInputFormat.class);
private static Configuration defaultConf = new Configuration();
private static FileSystem localFs = null;

View File

@ -26,8 +26,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -41,16 +39,19 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
public class TestFixedLengthInputFormat {
private static Log LOG;
private static final Logger LOG =
LoggerFactory.getLogger(TestFixedLengthInputFormat.class);
private static Configuration defaultConf;
private static FileSystem localFs;
private static Path workDir;
@ -62,7 +63,6 @@ public class TestFixedLengthInputFormat {
@BeforeClass
public static void onlyOnce() {
try {
LOG = LogFactory.getLog(TestFixedLengthInputFormat.class.getName());
defaultConf = new Configuration();
defaultConf.set("fs.defaultFS", "file:///");
localFs = FileSystem.getLocal(defaultConf);

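This file gains more than an import swap: the mutable static LOG assigned inside the @BeforeClass method becomes a final field initialized at class-load time. getLogger() is cheap and does not depend on prior configuration, so the eager form is safe; a sketch with a hypothetical class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class EagerLoggerInit {
  // Previously a non-final field set in a @BeforeClass method; getLogger()
  // is configuration-independent, so inline initialization works.
  private static final Logger LOG =
      LoggerFactory.getLogger(EagerLoggerInit.class);

  public static void main(String[] args) {
    LOG.info("logger available before any fixture runs");
  }
}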
View File

@ -28,7 +28,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
@ -42,16 +41,15 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class TestMRKeyValueTextInputFormat {
private static final Log LOG =
LogFactory.getLog(TestMRKeyValueTextInputFormat.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestMRKeyValueTextInputFormat.class);
private static Configuration defaultConf = new Configuration();
private static FileSystem localFs = null;

View File

@ -18,8 +18,6 @@
package org.apache.hadoop.mapreduce.lib.input;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -35,6 +33,8 @@ import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Random;
@ -42,8 +42,8 @@ import java.util.Random;
import static org.junit.Assert.assertEquals;
public class TestMRSequenceFileInputFilter {
private static final Log LOG =
LogFactory.getLog(TestMRSequenceFileInputFilter.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestMRSequenceFileInputFilter.class);
private static final int MAX_LENGTH = 15000;
private static final Configuration conf = new Configuration();

View File

@ -22,17 +22,17 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@ -43,8 +43,8 @@ import static org.junit.Assert.assertFalse;
*/
public class TestMapReduceJobControl extends HadoopTestCase {
public static final Log LOG =
LogFactory.getLog(TestMapReduceJobControl.class.getName());
public static final Logger LOG =
LoggerFactory.getLogger(TestMapReduceJobControl.class);
static Path rootDataDir = new Path(
System.getProperty("test.build.data", "."), "TestData");

View File

@ -18,8 +18,6 @@
package org.apache.hadoop.mapreduce.lib.output;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -44,6 +42,8 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Random;
@ -53,8 +53,8 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestMRSequenceFileAsBinaryOutputFormat {
private static final Log LOG =
LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName());
private static final Logger LOG =
LoggerFactory.getLogger(TestMRSequenceFileAsBinaryOutputFormat.class);
private static final int RECORDS = 10000;

View File

@ -17,15 +17,17 @@
*/
package org.apache.hadoop.mapreduce.lib.partition;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class TestKeyFieldHelper {
private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestKeyFieldHelper.class);
/**
* Test is key-field-helper's parse option.
*/

View File

@ -28,8 +28,6 @@ import java.security.PrivilegedExceptionAction;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.mapred.JobConf;
@ -54,17 +52,19 @@ import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestJHSSecurity {
private static final Log LOG = LogFactory.getLog(TestJHSSecurity.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestJHSSecurity.class);
@Test
public void testDelegationToken() throws IOException, InterruptedException {
Logger rootLogger = LogManager.getRootLogger();
org.apache.log4j.Logger rootLogger = LogManager.getRootLogger();
rootLogger.setLevel(Level.DEBUG);
final YarnConfiguration conf = new YarnConfiguration(new JobConf());

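Because this test now imports org.slf4j.Logger for its own field while still driving log4j configuration directly, the log4j Logger type has to be spelled out fully, as the hunk shows. A sketch of the coexistence, class name hypothetical:

import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MixedLoggerImports {
  // The unqualified Logger import now refers to slf4j.
  private static final Logger LOG =
      LoggerFactory.getLogger(MixedLoggerImports.class);

  public static void main(String[] args) {
    // log4j's Logger must be fully qualified where both APIs meet.
    org.apache.log4j.Logger rootLogger = LogManager.getRootLogger();
    rootLogger.setLevel(Level.DEBUG);
    LOG.debug("debug enabled on the log4j root logger");
  }
}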
View File

@ -24,8 +24,6 @@ import java.util.Date;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -34,6 +32,8 @@ import org.apache.hadoop.util.AsyncDiskService;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is a container of multiple thread pools, each for a volume,
@ -53,7 +53,8 @@ import org.apache.hadoop.classification.InterfaceStability;
@InterfaceStability.Unstable
public class MRAsyncDiskService {
public static final Log LOG = LogFactory.getLog(MRAsyncDiskService.class);
public static final Logger LOG =
LoggerFactory.getLogger(MRAsyncDiskService.class);
AsyncDiskService asyncDiskService;

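MRAsyncDiskService.LOG is public, so this hunk changes the type of an exposed field; any external code that referenced it as a commons-logging Log must be rebuilt against the new type. A hedged sketch of what a downstream caller looks like after the change (the caller class is hypothetical):

import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
import org.slf4j.Logger;

public class DownstreamCaller {
  public static void main(String[] args) {
    // Compiles only against the patched class, where LOG is org.slf4j.Logger.
    Logger log = MRAsyncDiskService.LOG;
    log.info("reusing the service's public logger");
  }
}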
View File

@ -20,16 +20,14 @@ package org.apache.hadoop.mapreduce.util;
import java.io.File;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@ -42,7 +40,8 @@ import static org.junit.Assert.fail;
*/
public class TestMRAsyncDiskService {
public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class);
public static final Logger LOG =
LoggerFactory.getLogger(TestMRAsyncDiskService.class);
private static String TEST_ROOT_DIR = new Path(System.getProperty(
"test.build.data", "/tmp")).toString();

View File

@ -24,8 +24,6 @@ import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@ -53,6 +51,8 @@ import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Configures and starts the MR-specific components in the YARN cluster.
@ -64,7 +64,8 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
public static final String APPJAR = JarFinder.getJar(LocalContainerLauncher.class);
private static final Log LOG = LogFactory.getLog(MiniMRYarnCluster.class);
private static final Logger LOG =
LoggerFactory.getLogger(MiniMRYarnCluster.class);
private JobHistoryServer historyServer;
private JobHistoryServerWrapper historyServerWrapper;
private static final String TIMELINE_AUX_SERVICE_NAME = "timeline_collector";

View File

@ -23,8 +23,6 @@ import java.io.IOException;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.SleepJob;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@ -37,9 +35,12 @@ import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestMRAMWithNonNormalizedCapabilities {
private static final Log LOG = LogFactory.getLog(TestMRAMWithNonNormalizedCapabilities.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMRAMWithNonNormalizedCapabilities.class);
private static FileSystem localFs;
protected static MiniMRYarnCluster mrCluster = null;

View File

@ -23,8 +23,6 @@ import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.CustomOutputCommitter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@ -47,6 +45,8 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings("deprecation")
public class TestMRAppWithCombiner {
@ -54,7 +54,8 @@ public class TestMRAppWithCombiner {
protected static MiniMRYarnCluster mrCluster;
private static Configuration conf = new Configuration();
private static FileSystem localFs;
private static final Log LOG = LogFactory.getLog(TestMRAppWithCombiner.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMRAppWithCombiner.class);
static {
try {

View File

@ -36,8 +36,6 @@ import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.FailingMapper;
import org.apache.hadoop.RandomTextWriterJob;
import org.apache.hadoop.RandomTextWriterJob.RandomInputFormat;
@ -104,10 +102,12 @@ import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestMRJobs {
private static final Log LOG = LogFactory.getLog(TestMRJobs.class);
private static final Logger LOG = LoggerFactory.getLogger(TestMRJobs.class);
private static final EnumSet<RMAppState> TERMINAL_RM_APP_STATES =
EnumSet.of(RMAppState.FINISHED, RMAppState.FAILED, RMAppState.KILLED);
private static final int NUM_NODE_MGRS = 3;

View File

@ -26,8 +26,6 @@ import java.util.List;
import org.junit.Assert;
import org.apache.avro.AvroRemoteException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.SleepJob;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@ -52,11 +50,13 @@ import org.apache.hadoop.yarn.util.Records;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestMRJobsWithHistoryService {
private static final Log LOG =
LogFactory.getLog(TestMRJobsWithHistoryService.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMRJobsWithHistoryService.class);
private static final EnumSet<RMAppState> TERMINAL_RM_APP_STATES =
EnumSet.of(RMAppState.FINISHED, RMAppState.FAILED, RMAppState.KILLED);

View File

@ -27,8 +27,6 @@ import java.util.regex.Pattern;
import org.junit.AfterClass;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.SleepJob;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
@ -42,11 +40,13 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestMRJobsWithProfiler {
private static final Log LOG =
LogFactory.getLog(TestMRJobsWithProfiler.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMRJobsWithProfiler.class);
private static final EnumSet<RMAppState> TERMINAL_RM_APP_STATES =
EnumSet.of(RMAppState.FINISHED, RMAppState.FAILED, RMAppState.KILLED);

View File

@ -22,8 +22,6 @@ import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.CustomOutputCommitter;
import org.apache.hadoop.FailMapper;
import org.apache.hadoop.conf.Configuration;
@ -46,10 +44,13 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestMROldApiJobs {
private static final Log LOG = LogFactory.getLog(TestMROldApiJobs.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestMROldApiJobs.class);
protected static MiniMRYarnCluster mrCluster;
private static Configuration conf = new Configuration();

View File

@ -26,8 +26,6 @@ import java.util.concurrent.ConcurrentMap;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -42,10 +40,13 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.mockito.Mockito.*;
public class TestRMNMInfo {
private static final Log LOG = LogFactory.getLog(TestRMNMInfo.class);
private static final Logger LOG = LoggerFactory.getLogger(TestRMNMInfo.class);
private static final int NUMNODEMANAGERS = 4;
protected static MiniMRYarnCluster mrCluster;

View File

@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.v2;
import java.io.File;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@ -48,6 +46,8 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestSpeculativeExecution {
@ -82,7 +82,8 @@ public class TestSpeculativeExecution {
}
}
private static final Log LOG = LogFactory.getLog(TestSpeculativeExecution.class);
private static final Logger LOG =
LoggerFactory.getLogger(TestSpeculativeExecution.class);
protected static MiniMRYarnCluster mrCluster;

View File

@ -21,8 +21,6 @@ package org.apache.hadoop.mapreduce.v2;
import java.io.File;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobCounter;
@ -35,10 +33,12 @@ import org.apache.hadoop.mapreduce.TaskType;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestUberAM extends TestMRJobs {
private static final Log LOG = LogFactory.getLog(TestUberAM.class);
private static final Logger LOG = LoggerFactory.getLogger(TestUberAM.class);
@BeforeClass
public static void setup() throws IOException {

View File

@ -21,28 +21,23 @@ package testjar;
import java.io.IOException;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class UserNamePermission
{
private static final Log LOG = LogFactory.getLog(UserNamePermission.class);
private static final Logger LOG =
LoggerFactory.getLogger(UserNamePermission.class);
//This mapper will read the user name and pass it to the reducer
public static class UserNameMapper extends Mapper<LongWritable,Text,Text,Text>
{