From 5afc1242ea323d5d160b8a1c676e499af81f21b4 Mon Sep 17 00:00:00 2001
From: Jonathan Turner Eagles
Date: Tue, 3 Dec 2013 22:44:07 +0000
Subject: [PATCH] MAPREDUCE-5645. TestFixedLengthInputFormat fails with native
 libs (Mit Desai via jeagles)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1547624 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt          |  3 +++
 .../mapred/TestFixedLengthInputFormat.java    | 22 +++++++++++--------
 .../lib/input/TestFixedLengthInputFormat.java | 18 +++++++++------
 3 files changed, 27 insertions(+), 16 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 1c2509584a1..a9527bc4bac 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -226,6 +226,9 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5631. TestJobEndNotifier.testNotifyRetries fails with Should
     have taken more than 5 seconds in jdk7 (Jonathan Eagles via jlowe)
 
+    MAPREDUCE-5645. TestFixedLengthInputFormat fails with native libs (Mit
+    Desai via jeagles)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
index a5f19eeb25d..d0bd7eecdf3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
@@ -197,17 +197,17 @@ public class TestFixedLengthInputFormat {
   public void testGzipWithTwoInputs() throws IOException {
     CompressionCodec gzip = new GzipCodec();
     localFs.delete(workDir, true);
-    // Create files with fixed length records with 5 byte long records.
-    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip,
-        "one  two  threefour five six  seveneightnine ten  ");
-    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
-        "ten  nine eightsevensix  five four threetwo  one  ");
     FixedLengthInputFormat format = new FixedLengthInputFormat();
     JobConf job = new JobConf(defaultConf);
     format.setRecordLength(job, 5);
     FileInputFormat.setInputPaths(job, workDir);
     ReflectionUtils.setConf(gzip, job);
     format.configure(job);
+    // Create files with fixed length records with 5 byte long records.
+    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip,
+        "one  two  threefour five six  seveneightnine ten  ");
+    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
+        "ten  nine eightsevensix  five four threetwo  one  ");
     InputSplit[] splits = format.getSplits(job, 100);
     assertEquals("compressed splits == 2", 2, splits.length);
     FileSplit tmp = (FileSplit) splits[0];
@@ -283,12 +283,16 @@ public class TestFixedLengthInputFormat {
       int fileSize = (totalRecords * recordLength);
       LOG.info("totalRecords=" + totalRecords + " recordLength="
           + recordLength);
+      // Create the job
+      JobConf job = new JobConf(defaultConf);
+      if (codec != null) {
+        ReflectionUtils.setConf(codec, job);
+      }
       // Create the test file
       ArrayList<String> recordList =
           createFile(file, codec, recordLength, totalRecords);
       assertTrue(localFs.exists(file));
-      // Create the job and set the fixed length record length config property
-      JobConf job = new JobConf(defaultConf);
+      //set the fixed length record length config property for the job
       FixedLengthInputFormat.setRecordLength(job, recordLength);
 
       int numSplits = 1;
@@ -383,8 +387,6 @@ public class TestFixedLengthInputFormat {
     if (codec != null) {
       fileName.append(".gz");
     }
-    writeFile(localFs, new Path(workDir, fileName.toString()), codec,
-        "one  two  threefour five six  seveneightnine ten");
     FixedLengthInputFormat format = new FixedLengthInputFormat();
     JobConf job = new JobConf(defaultConf);
     format.setRecordLength(job, 5);
@@ -393,6 +395,8 @@ public class TestFixedLengthInputFormat {
       ReflectionUtils.setConf(codec, job);
     }
     format.configure(job);
+    writeFile(localFs, new Path(workDir, fileName.toString()), codec,
+        "one  two  threefour five six  seveneightnine ten");
     InputSplit[] splits = format.getSplits(job, 100);
     if (codec != null) {
       assertEquals("compressed splits == 1", 1, splits.length);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
index f00b1a92ac1..cf7c61c09b5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
@@ -225,16 +225,16 @@ public class TestFixedLengthInputFormat {
   public void testGzipWithTwoInputs() throws Exception {
     CompressionCodec gzip = new GzipCodec();
     localFs.delete(workDir, true);
-    // Create files with fixed length records with 5 byte long records.
-    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip,
-        "one  two  threefour five six  seveneightnine ten  ");
-    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
-        "ten  nine eightsevensix  five four threetwo  one  ");
     Job job = Job.getInstance(defaultConf);
     FixedLengthInputFormat format = new FixedLengthInputFormat();
     format.setRecordLength(job.getConfiguration(), 5);
     ReflectionUtils.setConf(gzip, job.getConfiguration());
     FileInputFormat.setInputPaths(job, workDir);
+    // Create files with fixed length records with 5 byte long records.
+    writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip,
+        "one  two  threefour five six  seveneightnine ten  ");
+    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
+        "ten  nine eightsevensix  five four threetwo  one  ");
     List<InputSplit> splits = format.getSplits(job);
     assertEquals("compressed splits == 2", 2, splits.size());
     FileSplit tmp = (FileSplit) splits.get(0);
@@ -310,12 +310,16 @@ public class TestFixedLengthInputFormat {
       int fileSize = (totalRecords * recordLength);
       LOG.info("totalRecords=" + totalRecords + " recordLength="
           + recordLength);
+      // Create the job
+      Job job = Job.getInstance(defaultConf);
+      if (codec != null) {
+        ReflectionUtils.setConf(codec, job.getConfiguration());
+      }
       // Create the test file
       ArrayList<String> recordList =
           createFile(file, codec, recordLength, totalRecords);
       assertTrue(localFs.exists(file));
-      // Create the job and set the fixed length record length config property
-      Job job = Job.getInstance(defaultConf);
+      //set the fixed length record length config property for the job
       FixedLengthInputFormat.setRecordLength(job.getConfiguration(),
           recordLength);
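
Note (not part of the patch): the change is purely an ordering fix. In each affected test the JobConf/Job and the compression codec are now configured, via ReflectionUtils.setConf, before the fixed-length test input is written rather than afterwards. Configuring the codec first means it already has its Configuration when it is used to create the compressed test files, which is the apparent reason these tests only failed once the native compression libraries were loaded. Below is a minimal standalone sketch of the corrected ordering for the gzip case; the class name, the writeCompressedFile() helper and the temporary work directory are illustrative, while the Hadoop calls mirror the ones in the patched tests.

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FixedLengthInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;

public class FixedLengthGzipOrderingSketch {

  // Illustrative stand-in for the tests' private writeFile() helper:
  // writes one gzip-compressed file of fixed 5-byte records.
  private static void writeCompressedFile(FileSystem fs, Path path,
      CompressionCodec codec, String contents) throws IOException {
    OutputStream os = codec.createOutputStream(fs.create(path));
    Writer writer = new OutputStreamWriter(os);
    try {
      writer.write(contents);
    } finally {
      writer.close();
    }
  }

  public static void main(String[] args) throws IOException {
    JobConf job = new JobConf();
    FileSystem localFs = FileSystem.getLocal(job);
    // Illustrative work directory; the tests use their own workDir.
    Path workDir = new Path(System.getProperty("java.io.tmpdir"),
        "fixedlength-sketch");
    localFs.delete(workDir, true);

    // 1) Configure the input format and, crucially, the codec first ...
    CompressionCodec gzip = new GzipCodec();
    FixedLengthInputFormat format = new FixedLengthInputFormat();
    FixedLengthInputFormat.setRecordLength(job, 5);
    FileInputFormat.setInputPaths(job, workDir);
    ReflectionUtils.setConf(gzip, job);   // codec receives its Configuration here
    format.configure(job);

    // 2) ... and only then write the compressed 5-byte-record input.
    writeCompressedFile(localFs, new Path(workDir, "part1.txt.gz"), gzip,
        "one  two  threefour five six  seveneightnine ten  ");

    InputSplit[] splits = format.getSplits(job, 100);
    System.out.println("splits = " + splits.length);
  }
}

The patch applies the same reordering to both the org.apache.hadoop.mapred and the org.apache.hadoop.mapreduce.lib.input variants of TestFixedLengthInputFormat.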