MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is successful (Contributed by Binglin Chang)
This commit is contained in:
parent
bfd1d75d87
commit
17cd0faace
|
@ -17,3 +17,4 @@ MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
|
|||
MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang via todd)
|
||||
MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
|
||||
MAPREDUCE-6054. native-task: Speed up tests (todd)
|
||||
MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is successful (Binglin Chang)
|
||||
|
|
|
@ -83,10 +83,8 @@
|
|||
</property>
|
||||
</properties>
|
||||
<includes>
|
||||
<include>**/TestTaskContext.java</include>
|
||||
<include>**/buffer/Test*.java</include>
|
||||
<include>**/handlers/Test*.java</include>
|
||||
<include>**/serde/Test*.java</include>
|
||||
<include>**/*Test.java</include>
|
||||
<include>**/Test*.java</include>
|
||||
</includes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
@ -201,28 +199,6 @@
|
|||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<properties>
|
||||
<property>
|
||||
<name>listener</name>
|
||||
<value>org.apache.hadoop.test.TimedOutTestsListener</value>
|
||||
</property>
|
||||
</properties>
|
||||
<includes>
|
||||
<include>**/TestTaskContext.java</include>
|
||||
<include>**/buffer/Test*.java</include>
|
||||
<include>**/handlers/Test*.java</include>
|
||||
<include>**/serde/Test*.java</include>
|
||||
<include>**/combinertest/*Test.java</include>
|
||||
<include>**/compresstest/*Test.java</include>
|
||||
<include>**/nonsorttest/*Test.java</include>
|
||||
<include>**/kvtest/*Test.java</include>
|
||||
</includes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
package org.apache.hadoop.mapred.nativetask.combinertest;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
|
@ -25,6 +26,7 @@ import org.apache.hadoop.fs.Path;
|
|||
import org.apache.hadoop.io.IntWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapred.Task;
|
||||
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
|
||||
import org.apache.hadoop.mapred.nativetask.combinertest.WordCount.IntSumReducer;
|
||||
import org.apache.hadoop.mapred.nativetask.combinertest.WordCount.TokenizerMapper;
|
||||
import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
|
||||
|
@ -36,6 +38,8 @@ import org.apache.hadoop.mapreduce.Job;
|
|||
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
|
||||
import org.apache.hadoop.util.NativeCodeLoader;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -58,11 +62,11 @@ public class CombinerTest {
|
|||
|
||||
final Job normaljob = getJob("normalwordcount", commonConf, inputpath, hadoopoutputpath);
|
||||
|
||||
nativejob.waitForCompletion(true);
|
||||
assertTrue(nativejob.waitForCompletion(true));
|
||||
|
||||
Counter nativeReduceGroups = nativejob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
|
||||
|
||||
normaljob.waitForCompletion(true);
|
||||
assertTrue(normaljob.waitForCompletion(true));
|
||||
Counter normalReduceGroups = normaljob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
|
||||
|
||||
assertEquals(true, ResultVerifier.verify(nativeoutputpath, hadoopoutputpath));
|
||||
|
@ -77,6 +81,8 @@ public class CombinerTest {
|
|||
|
||||
@Before
|
||||
public void startUp() throws Exception {
|
||||
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
|
||||
Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
|
||||
final ScenarioConfiguration conf = new ScenarioConfiguration();
|
||||
conf.addcombinerConf();
|
||||
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
package org.apache.hadoop.mapred.nativetask.combinertest;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
@ -27,17 +28,27 @@ import org.apache.hadoop.fs.Path;
|
|||
import org.apache.hadoop.io.IntWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapred.Task;
|
||||
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
|
||||
import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
|
||||
import org.apache.hadoop.mapreduce.Counter;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
import org.apache.hadoop.util.NativeCodeLoader;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
public class LargeKVCombinerTest {
|
||||
private static final Log LOG = LogFactory.getLog(LargeKVCombinerTest.class);
|
||||
|
||||
@Before
|
||||
public void startUp() throws Exception {
|
||||
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
|
||||
Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLargeValueCombiner(){
|
||||
final Configuration normalConf = ScenarioConfiguration.getNormalConfiguration();
|
||||
|
@ -74,10 +85,10 @@ public class LargeKVCombinerTest {
|
|||
final Job normaljob = CombinerTest.getJob("normalwordcount", normalConf, inputPath, hadoopOutputPath);
|
||||
final Job nativejob = CombinerTest.getJob("nativewordcount", nativeConf, inputPath, nativeOutputPath);
|
||||
|
||||
nativejob.waitForCompletion(true);
|
||||
assertTrue(nativejob.waitForCompletion(true));
|
||||
Counter nativeReduceGroups = nativejob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
|
||||
|
||||
normaljob.waitForCompletion(true);
|
||||
assertTrue(normaljob.waitForCompletion(true));
|
||||
Counter normalReduceGroups = normaljob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
|
||||
|
||||
final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);
|
||||
|
|
|
@ -32,11 +32,14 @@ import org.apache.hadoop.mapred.RunningJob;
|
|||
import org.apache.hadoop.mapred.SequenceFileInputFormat;
|
||||
import org.apache.hadoop.mapred.Task;
|
||||
import org.apache.hadoop.mapred.TextOutputFormat;
|
||||
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
|
||||
import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
|
||||
import org.apache.hadoop.mapreduce.Counter;
|
||||
import org.apache.hadoop.util.NativeCodeLoader;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -70,6 +73,8 @@ public class OldAPICombinerTest {
|
|||
|
||||
@Before
|
||||
public void startUp() throws Exception {
|
||||
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
|
||||
Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
|
||||
final ScenarioConfiguration conf = new ScenarioConfiguration();
|
||||
conf.addcombinerConf();
|
||||
this.fs = FileSystem.get(conf);
|
||||
|
|
|
@ -18,16 +18,20 @@
|
|||
package org.apache.hadoop.mapred.nativetask.compresstest;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
|
||||
import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
import org.apache.hadoop.util.NativeCodeLoader;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -38,12 +42,12 @@ public class CompressTest {
|
|||
final Configuration conf = ScenarioConfiguration.getNativeConfiguration();
|
||||
conf.addResource(TestConstants.SNAPPY_COMPRESS_CONF_PATH);
|
||||
final Job job = CompressMapper.getCompressJob("nativesnappy", conf);
|
||||
job.waitForCompletion(true);
|
||||
assertTrue(job.waitForCompletion(true));
|
||||
|
||||
final Configuration hadoopconf = ScenarioConfiguration.getNormalConfiguration();
|
||||
hadoopconf.addResource(TestConstants.SNAPPY_COMPRESS_CONF_PATH);
|
||||
final Job hadoopjob = CompressMapper.getCompressJob("hadoopsnappy", hadoopconf);
|
||||
hadoopjob.waitForCompletion(true);
|
||||
assertTrue(hadoopjob.waitForCompletion(true));
|
||||
|
||||
final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativesnappy",
|
||||
CompressMapper.outputFileDir + "hadoopsnappy");
|
||||
|
@ -55,12 +59,12 @@ public class CompressTest {
|
|||
final Configuration conf = ScenarioConfiguration.getNativeConfiguration();
|
||||
conf.addResource(TestConstants.GZIP_COMPRESS_CONF_PATH);
|
||||
final Job job = CompressMapper.getCompressJob("nativegzip", conf);
|
||||
job.waitForCompletion(true);
|
||||
assertTrue(job.waitForCompletion(true));
|
||||
|
||||
final Configuration hadoopconf = ScenarioConfiguration.getNormalConfiguration();
|
||||
hadoopconf.addResource(TestConstants.GZIP_COMPRESS_CONF_PATH);
|
||||
final Job hadoopjob = CompressMapper.getCompressJob("hadoopgzip", hadoopconf);
|
||||
hadoopjob.waitForCompletion(true);
|
||||
assertTrue(hadoopjob.waitForCompletion(true));
|
||||
|
||||
final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativegzip",
|
||||
CompressMapper.outputFileDir + "hadoopgzip");
|
||||
|
@ -72,12 +76,12 @@ public class CompressTest {
|
|||
final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
|
||||
nativeConf.addResource(TestConstants.LZ4_COMPRESS_CONF_PATH);
|
||||
final Job nativeJob = CompressMapper.getCompressJob("nativelz4", nativeConf);
|
||||
nativeJob.waitForCompletion(true);
|
||||
assertTrue(nativeJob.waitForCompletion(true));
|
||||
|
||||
final Configuration hadoopConf = ScenarioConfiguration.getNormalConfiguration();
|
||||
hadoopConf.addResource(TestConstants.LZ4_COMPRESS_CONF_PATH);
|
||||
final Job hadoopJob = CompressMapper.getCompressJob("hadooplz4", hadoopConf);
|
||||
hadoopJob.waitForCompletion(true);
|
||||
assertTrue(hadoopJob.waitForCompletion(true));
|
||||
final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativelz4",
|
||||
CompressMapper.outputFileDir + "hadooplz4");
|
||||
assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
|
||||
|
@ -85,6 +89,8 @@ public class CompressTest {
|
|||
|
||||
@Before
|
||||
public void startUp() throws Exception {
|
||||
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
|
||||
Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
|
||||
final ScenarioConfiguration conf = new ScenarioConfiguration();
|
||||
final FileSystem fs = FileSystem.get(conf);
|
||||
final Path path = new Path(CompressMapper.inputFile);
|
||||
|
|
|
@ -96,8 +96,7 @@ public class KVJob {
|
|||
FileOutputFormat.setOutputPath(job, new Path(outputpath));
|
||||
}
|
||||
|
||||
public void runJob() throws Exception {
|
||||
|
||||
job.waitForCompletion(true);
|
||||
public boolean runJob() throws Exception {
|
||||
return job.waitForCompletion(true);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
package org.apache.hadoop.mapred.nativetask.kvtest;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -28,9 +29,13 @@ import org.apache.hadoop.conf.Configuration;
|
|||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.LongWritable;
|
||||
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
|
||||
import org.apache.hadoop.util.NativeCodeLoader;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
@ -100,6 +105,12 @@ public class KVTest {
|
|||
this.valueclass = valueclass;
|
||||
}
|
||||
|
||||
@Before
|
||||
public void startUp() throws Exception {
|
||||
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
|
||||
Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKVCompability() {
|
||||
try {
|
||||
|
@ -139,7 +150,7 @@ public class KVTest {
|
|||
nativekvtestconf.set(TestConstants.NATIVETASK_KVTEST_CREATEFILE, "true");
|
||||
try {
|
||||
final KVJob keyJob = new KVJob(jobname, nativekvtestconf, keyclass, valueclass, inputpath, outputpath);
|
||||
keyJob.runJob();
|
||||
assertTrue("job should complete successfully", keyJob.runJob());
|
||||
} catch (final Exception e) {
|
||||
return "native testcase run time error.";
|
||||
}
|
||||
|
@ -161,7 +172,7 @@ public class KVTest {
|
|||
hadoopkvtestconf.set(TestConstants.NATIVETASK_KVTEST_CREATEFILE, "false");
|
||||
try {
|
||||
final KVJob keyJob = new KVJob(jobname, hadoopkvtestconf, keyclass, valueclass, inputpath, outputpath);
|
||||
keyJob.runJob();
|
||||
assertTrue("job should complete successfully", keyJob.runJob());
|
||||
} catch (final Exception e) {
|
||||
return "normal testcase run time error.";
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
package org.apache.hadoop.mapred.nativetask.kvtest;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -28,14 +29,24 @@ import org.apache.hadoop.fs.FileSystem;
|
|||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.IntWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
|
||||
import org.apache.hadoop.util.NativeCodeLoader;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
public class LargeKVTest {
|
||||
private static final Log LOG = LogFactory.getLog(LargeKVTest.class);
|
||||
|
||||
@Before
|
||||
public void startUp() throws Exception {
|
||||
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
|
||||
Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testKeySize() {
|
||||
runKVSizeTests(Text.class, IntWritable.class);
|
||||
|
@ -104,7 +115,7 @@ public class LargeKVTest {
|
|||
fs.close();
|
||||
try {
|
||||
final KVJob keyJob = new KVJob(jobname, conf, keyclass, valueclass, inputpath, outputpath);
|
||||
keyJob.runJob();
|
||||
assertTrue("job should complete successfully", keyJob.runJob());
|
||||
} catch (final Exception e) {
|
||||
return "normal testcase run time error.";
|
||||
}
|
||||
|
@ -123,7 +134,7 @@ public class LargeKVTest {
|
|||
fs.close();
|
||||
try {
|
||||
final KVJob keyJob = new KVJob(jobname, conf, keyclass, valueclass, inputpath, outputpath);
|
||||
keyJob.runJob();
|
||||
assertTrue("job should complete successfully", keyJob.runJob());
|
||||
} catch (final Exception e) {
|
||||
return "normal testcase run time error.";
|
||||
}
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path;
|
|||
import org.apache.hadoop.io.IntWritable;
|
||||
import org.apache.hadoop.io.LongWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
|
||||
import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
|
||||
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
|
||||
|
@ -36,6 +37,8 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
|
|||
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
|
||||
import org.apache.hadoop.util.NativeCodeLoader;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -65,6 +68,8 @@ public class NonSortTest {
|
|||
|
||||
@Before
|
||||
public void startUp() throws Exception {
|
||||
Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
|
||||
Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
|
||||
final ScenarioConfiguration configuration = new ScenarioConfiguration();
|
||||
configuration.addNonSortTestConf();
|
||||
final FileSystem fs = FileSystem.get(configuration);
|
||||
|
|
Loading…
Reference in New Issue