Merge -r 1233089:1233090 from trunk to branch. FIXES: MAPREDUCE-3582

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1234100 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 2012-01-20 19:54:20 +00:00
parent 716895487c
commit 25e8aad054
322 changed files with 655 additions and 25 deletions

View File

@@ -466,6 +466,9 @@ Release 0.23.1 - Unreleased
    MAPREDUCE-3684. LocalDistributedCacheManager does not shut down its thread
    pool (tomwhite)
    MAPREDUCE-3582. Move successfully passing MR1 tests to MR2 maven tree.
    (ahmed via tucu)
Release 0.23.0 - 2011-11-01
  INCOMPATIBLE CHANGES

View File

@@ -35,6 +35,7 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -49,6 +50,7 @@ import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobSummary;
@@ -86,6 +88,9 @@ public class JobHistory extends AbstractService implements HistoryContext {
  private static final Log LOG = LogFactory.getLog(JobHistory.class);
  private static final Log SUMMARY_LOG = LogFactory.getLog(JobSummary.class);
  public static final Pattern CONF_FILENAME_REGEX =
      Pattern.compile("(" + JobID.JOBID_REGEX + ")_conf.xml(?:\\.[0-9]+\\.old)?");
  public static final String OLD_SUFFIX = ".old";
  private static String DONE_BEFORE_SERIAL_TAIL =
      JobHistoryUtils.doneSubdirsBeforeSerialTail();
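For context (not part of the commit itself): the new CONF_FILENAME_REGEX recognizes job configuration filenames, including rolled-over ones ending in a numeric ".N.old" suffix. Below is a minimal, self-contained sketch of that behavior; it assumes JobID.JOBID_REGEX expands to roughly "job_[0-9]+_[0-9]+" (not shown in this diff), and the class and filenames are hypothetical.

// Illustrative sketch only; JOBID_REGEX below is an assumption standing in for JobID.JOBID_REGEX.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ConfFilenameRegexSketch {   // hypothetical class, not part of the commit
  private static final String JOBID_REGEX = "job_[0-9]+_[0-9]+";

  // Same shape as the pattern added to JobHistory: "<job id>_conf.xml",
  // optionally followed by ".<number>.old".
  private static final Pattern CONF_FILENAME_REGEX =
      Pattern.compile("(" + JOBID_REGEX + ")_conf.xml(?:\\.[0-9]+\\.old)?");

  public static void main(String[] args) {
    String[] names = {
      "job_201201011200_0001_conf.xml",       // matches; group(1) is the job id
      "job_201201011200_0001_conf.xml.3.old", // matches the rolled-over form
      "job_201201011200_0001.jhist"           // does not match
    };
    for (String name : names) {
      Matcher m = CONF_FILENAME_REGEX.matcher(name);
      System.out.println(name + " -> " + (m.matches() ? m.group(1) : "no match"));
    }
  }
}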

View File

@@ -38,6 +38,7 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
import org.junit.Ignore;
/**
 * Distributed i/o benchmark.
@@ -66,6 +67,7 @@ import org.apache.hadoop.mapred.*;
 * <li>standard i/o rate deviation</li>
 * </ul>
 */
@Ignore
public class DFSCIOTest extends TestCase {
  // Constants
  private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
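As an aside (not part of the diff): the change above is the pattern repeated across the remaining test files in this commit, a class-level org.junit.Ignore annotation that skips the whole test class when the suite is run by a JUnit 4-aware runner (for example Maven Surefire with the JUnit 4 provider), even though the tests themselves are still written in JUnit 3 style. A minimal sketch, using a hypothetical class name:

import junit.framework.TestCase;
import org.junit.Ignore;

// ExampleIgnoredTest is a hypothetical name used only for illustration.
@Ignore
public class ExampleIgnoredTest extends TestCase {
  public void testSomething() {
    // Never executes while the class carries @Ignore; the runner reports the class as skipped.
    fail("should be skipped");
  }
}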

View File

@@ -41,6 +41,7 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
import org.junit.Ignore;
/**
 * Distributed checkup of the file system consistency.
@@ -52,6 +53,7 @@ import org.apache.hadoop.mapred.*;
 * Optionally displays statistics on read performance.
 *
 */
@Ignore
public class DistributedFSCheck extends TestCase {
  // Constants
  private static final Log LOG = LogFactory.getLog(DistributedFSCheck.class);

View File

@@ -35,10 +35,12 @@ import org.apache.hadoop.mapred.JobStatus;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.net.StandardSocketFactory;
import org.junit.Ignore;
/**
 * This class checks that RPCs can use specialized socket factories.
 */
@Ignore
public class TestSocketFactory extends TestCase {
  /**

View File

@@ -39,7 +39,8 @@ import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
@Ignore
public class TestBadRecords extends ClusterMapReduceTestCase {
  private static final Log LOG =

View File

@@ -20,9 +20,12 @@ package org.apache.hadoop.mapred;
import java.io.IOException;
import org.junit.Ignore;
/**
 * Tests Job end notification in cluster mode.
 */
@Ignore
public class TestClusterMRNotification extends NotificationTestCase {
  public TestClusterMRNotification() throws IOException {

View File

@@ -21,10 +21,11 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.junit.Ignore;
import java.io.*;
import java.util.Properties;
@Ignore
public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
  public void _testMapReduce(boolean restart) throws Exception {
    OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));

View File

@@ -28,12 +28,13 @@ import org.apache.hadoop.fs.*;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Ignore;
/**
 * check for the job submission options of
 * -libjars -files -archives
 */
@Ignore
public class TestCommandLineJobSubmission extends TestCase {
  // Input output paths for this..
  // these are all dummy and does not test

View File

@@ -36,12 +36,13 @@ import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
import org.junit.Test;
import static junit.framework.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@Ignore
public class TestConcatenatedCompressedInput {
  private static final Log LOG =
    LogFactory.getLog(TestConcatenatedCompressedInput.class.getName());

View File

@@ -29,7 +29,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TestMRJobClient;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.Tool;
import org.junit.Ignore;
@Ignore
public class TestJobClient extends TestMRJobClient {
  private String runJob() throws Exception {

View File

@@ -17,6 +17,7 @@
 */
package org.apache.hadoop.mapred;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import java.net.URLClassLoader;
@@ -29,7 +30,7 @@ import org.apache.hadoop.fs.FileUtil;
import static org.junit.Assert.*;
@Ignore
public class TestJobConf {
  private static final String JAR_RELATIVE_PATH =
    "build/test/mapred/testjar/testjob.jar";

View File

@@ -33,7 +33,8 @@ import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.junit.Ignore;
@Ignore
public class TestJobName extends ClusterMapReduceTestCase {
  public void testComplexName() throws Exception {

View File

@@ -34,12 +34,14 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Ignore;
/**
 * Class to test mapred task's
 * - temp directory
 * - child env
 */
@Ignore
public class TestMiniMRChildTask extends TestCase {
  private static final Log LOG =
    LogFactory.getLog(TestMiniMRChildTask.class.getName());

View File

@@ -30,11 +30,13 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.junit.Ignore;
/**
 * A JUnit test to test Mini Map-Reduce Cluster with multiple directories
 * and check for correct classpath
 */
@Ignore
public class TestMiniMRClasspath extends TestCase {

Some files were not shown because too many files have changed in this diff.