MAPREDUCE-5852. Merging change r1589006 from trunk to branch-2.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1589007 13f79535-47bb-0310-9956-ffa450edef68
Chris Nauroth 2014-04-21 23:49:28 +00:00
parent de1f0bf3e4
commit f0d3664d83
68 changed files with 88 additions and 86 deletions
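Nearly every hunk below is the same mechanical change: the JUnit 3 `junit.framework.Assert` imports are replaced with `org.junit.Assert`, and the few floating-point assertions gain an explicit delta argument, which `org.junit.Assert` expects for float/double comparisons (its two-argument floating-point overload is deprecated). A minimal sketch of the pattern being applied; the test class, method, and values here are hypothetical and not taken from the patch:

```java
// Illustrative sketch only -- TestProgress and its contents are hypothetical.

// Before (JUnit 3 style):
//   import static junit.framework.Assert.assertEquals;
//   assertEquals(1.0f, progress);          // no tolerance argument

// After (JUnit 4 style, as applied throughout this commit):
import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class TestProgress {

  @Test
  public void progressReachesOne() {
    float progress = 1.0f;
    // org.junit.Assert wants an explicit tolerance (delta) when comparing
    // floating-point values; here 0.0f demands exact equality.
    assertEquals(1.0f, progress, 0.0f);
  }
}
```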

View File

@ -29,6 +29,8 @@ Release 2.5.0 - UNRELEASED
MAPREDUCE-5836. Fix typo in RandomTextWriter (Akira AJISAKA via jeagles)
MAPREDUCE-5852. Prepare MapReduce codebase for JUnit 4.11. (cnauroth)
OPTIMIZATIONS
BUG FIXES

View File

@ -24,7 +24,7 @@ import java.io.DataInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.jobhistory;
import static junit.framework.Assert.assertTrue;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
@ -30,7 +30,7 @@ import static org.mockito.Mockito.never;
import java.io.File;
import java.io.IOException;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -501,4 +501,4 @@ class JHEventHandlerForSigtermTest extends JobHistoryEventHandler {
this.lastEventHandled = event;
this.eventsHandled++;
}
}
}

View File

@ -24,7 +24,7 @@ import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.EnumSet;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -21,7 +21,7 @@ package org.apache.hadoop.mapreduce.v2.app;
import java.util.Iterator;
import java.util.List;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;

View File

@ -23,7 +23,7 @@ import java.net.InetSocketAddress;
import java.util.Iterator;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.TaskAttemptListenerImpl;

View File

@ -22,7 +22,7 @@ import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;

View File

@ -26,7 +26,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.mapreduce.v2.app;
import java.io.IOException;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;

View File

@ -31,7 +31,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;

View File

@ -24,7 +24,7 @@ import java.security.PrivilegedExceptionAction;
import java.util.Iterator;
import java.util.List;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobACL;

View File

@ -40,7 +40,7 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -34,7 +34,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -28,7 +28,7 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import junit.framework.Assert;
import org.junit.Assert;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;

View File

@ -27,7 +27,7 @@ import static org.mockito.Mockito.when;
import java.util.concurrent.ConcurrentLinkedQueue;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.mapreduce.v2.app.job.impl;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -33,7 +33,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;

View File

@ -27,7 +27,7 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileStatus;

View File

@ -30,7 +30,7 @@ import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -439,4 +439,4 @@ public class TestContainerLauncher {
throw new IOException(e);
}
}
}
}

View File

@ -31,7 +31,7 @@ import java.util.Map.Entry;
import javax.net.ssl.SSLException;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.hadoop.conf.Configuration;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapred;
import static junit.framework.Assert.assertNotNull;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;

View File

@ -27,7 +27,7 @@ import java.util.Arrays;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import junit.framework.Assert;
import org.junit.Assert;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;

View File

@ -22,7 +22,7 @@ package org.apache.hadoop.mapreduce.v2;
import java.io.IOException;
import java.net.InetSocketAddress;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.Server;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.v2;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;

View File

@ -407,7 +407,7 @@ public class TestMRApps {
URI file = new URI("mockfs://mock/tmp/something.zip#something");
Path filePath = new Path(file);
URI file2 = new URI("mockfs://mock/tmp/something.txt#something");
Path file2Path = new Path(file);
Path file2Path = new Path(file2);
when(mockFs.resolvePath(filePath)).thenReturn(filePath);
when(mockFs.resolvePath(file2Path)).thenReturn(file2Path);
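Note that besides the import swap, this TestMRApps hunk also corrects a pre-existing copy-paste bug: `file2Path` was previously built from `file` instead of `file2`, so the second mocked `resolvePath` call pointed at the wrong URI.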

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapred;
import junit.framework.Assert;
import org.junit.Assert;
import org.junit.Test;

View File

@ -26,7 +26,7 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -23,7 +23,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import junit.framework.Assert;
import org.junit.Assert;
import org.junit.Test;

View File

@ -35,7 +35,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**

View File

@ -17,8 +17,8 @@
*/
package org.apache.hadoop.mapred;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

View File

@ -22,7 +22,7 @@ import java.io.FileWriter;
import java.io.IOException;
import java.io.File;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;

View File

@ -26,7 +26,7 @@ import java.util.Set;
import javax.annotation.Nullable;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -30,7 +30,7 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@ -193,7 +193,7 @@ public class TestMerger {
RawKeyValueIterator mergeQueue = Merger.merge(conf, fs, keyClass,
valueClass, segments, 2, tmpDir, comparator, getReporter(),
readsCounter, writesCounter, mergePhase);
Assert.assertEquals(1.0f, mergeQueue.getProgress().get());
Assert.assertEquals(1.0f, mergeQueue.getProgress().get(), 0.0f);
}
private Progressable getReporter() {
@ -274,4 +274,4 @@ public class TestMerger {
}
};
}
}
}

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.mapreduce.v2.hs;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2.hs;
import static junit.framework.Assert.assertEquals;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Collection;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.mapreduce.v2.hs;
import java.util.Map;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -36,7 +36,7 @@ import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -24,7 +24,7 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.HashMap;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.conf;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapreduce.MRConfig;

View File

@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
import java.net.URI;
import junit.framework.Assert;
import org.junit.Assert;
public class MRCaching {
static String testStr = "This is a test file " + "used for testing caching "

View File

@ -24,7 +24,7 @@ import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Iterator;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -31,7 +31,7 @@ import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Collection;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobID;
@ -218,8 +218,8 @@ public class TestClientServiceDelegate {
Assert.assertNotNull(jobStatus);
Assert.assertEquals("TestJobFilePath", jobStatus.getJobFile());
Assert.assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl());
Assert.assertEquals(1.0f, jobStatus.getMapProgress());
Assert.assertEquals(1.0f, jobStatus.getReduceProgress());
Assert.assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f);
Assert.assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f);
}
@Test
@ -358,8 +358,8 @@ public class TestClientServiceDelegate {
Assert.assertNotNull(jobStatus1);
Assert.assertEquals("TestJobFilePath", jobStatus1.getJobFile());
Assert.assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl());
Assert.assertEquals(1.0f, jobStatus1.getMapProgress());
Assert.assertEquals(1.0f, jobStatus1.getReduceProgress());
Assert.assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f);
Assert.assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f);
verify(clientServiceDelegate, times(0)).instantiateAMProxy(
any(InetSocketAddress.class));

View File

@ -28,7 +28,7 @@ import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapred.lib.CombineFileRecordReader;
import org.junit.Test;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -18,8 +18,8 @@
package org.apache.hadoop.mapred;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.io.IOException;
import java.util.BitSet;

View File

@ -18,9 +18,9 @@
package org.apache.hadoop.mapred;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.io.OutputStream;

View File

@ -38,7 +38,7 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
import org.junit.Test;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -38,7 +38,7 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.junit.BeforeClass;
import org.junit.Test;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
public class TestFixedLengthInputFormat {

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapred;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.RawComparator;

View File

@ -22,7 +22,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.JobStatus.State;

View File

@ -40,7 +40,7 @@ import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -24,7 +24,7 @@ import static org.mockito.Mockito.when;
import java.util.ArrayList;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapred.lib;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reducer;

View File

@ -29,7 +29,7 @@ import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.RawComparator;

View File

@ -29,7 +29,7 @@ import java.util.TreeMap;
import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPOutputStream;
import junit.framework.Assert;
import org.junit.Assert;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;

View File

@ -18,9 +18,9 @@
package org.apache.hadoop.mapreduce.lib.input;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.util.BitSet;

View File

@ -18,10 +18,10 @@
package org.apache.hadoop.mapreduce.lib.input;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.io.OutputStream;

View File

@ -46,7 +46,7 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.junit.BeforeClass;
import org.junit.Test;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
public class TestFixedLengthInputFormat {

View File

@ -44,7 +44,7 @@ import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import static junit.framework.Assert.*;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -22,7 +22,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.lib.jobcontrol;
import static junit.framework.Assert.assertEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

View File

@ -26,7 +26,7 @@ import java.net.InetSocketAddress;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -21,7 +21,7 @@ package org.apache.hadoop.mapreduce.v2;
import java.io.File;
import java.io.IOException;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -118,4 +118,4 @@ public class TestMRAMWithNonNormalizedCapabilities {
mrCluster.stop();
}
}
}
}

View File

@ -23,7 +23,7 @@ import java.io.IOException;
import java.util.EnumSet;
import java.util.List;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.avro.AvroRemoteException;
import org.apache.commons.logging.Log;

View File

@ -24,7 +24,7 @@ import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -23,7 +23,7 @@ import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.tools.mapred.lib;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -23,7 +23,7 @@ import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import junit.framework.Assert;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;