diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 01ffe9f3a05..8f4185d88d7 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -46,6 +46,9 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-3728. ShuffleHandler can't access results when configured in a
     secure mode (ahmed via tucu)
 
+    MAPREDUCE-3952. In MR2, when Total input paths to process == 1,
+    CombinefileInputFormat.getSplits() returns 0 split. (zhenxiao via tucu)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
index d213f592716..f991f2a6501 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
@@ -514,7 +514,7 @@ public abstract class CombineFileInputFormat
           long left = locations[i].getLength();
           long myOffset = locations[i].getOffset();
           long myLength = 0;
-          while (left > 0) {
+          do {
             if (maxSize == 0) {
               myLength = left;
             } else {
@@ -536,7 +536,7 @@ public abstract class CombineFileInputFormat
             myOffset += myLength;
 
             blocksList.add(oneblock);
-          }
+          } while (left > 0);
         }
         blocks = blocksList.toArray(new OneBlockInfo[blocksList.size()]);
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
index 9369ffc7404..a46173f15ff 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.junit.Test;
 
 public class TestCombineFileInputFormat extends TestCase {
 
@@ -1111,6 +1112,34 @@ public class TestCombineFileInputFormat extends TestCase {
       }
     }
   }
+
+  /**
+   * Test when the input file's length is 0.
+   */
+  @Test
+  public void testForEmptyFile() throws Exception {
+    Configuration conf = new Configuration();
+    FileSystem fileSys = FileSystem.get(conf);
+    Path file = new Path("test" + "/file");
+    FSDataOutputStream out = fileSys.create(file, true,
+        conf.getInt("io.file.buffer.size", 4096), (short) 1, (long) BLOCKSIZE);
+    out.write(new byte[0]);
+    out.close();
+
+    // split it using a CombinedFile input format
+    DummyInputFormat inFormat = new DummyInputFormat();
+    Job job = Job.getInstance(conf);
+    FileInputFormat.setInputPaths(job, "test");
+    List<InputSplit> splits = inFormat.getSplits(job);
+    assertEquals(splits.size(), 1);
+    CombineFileSplit fileSplit = (CombineFileSplit) splits.get(0);
+    assertEquals(1, fileSplit.getNumPaths());
+    assertEquals(file.getName(), fileSplit.getPath(0).getName());
+    assertEquals(0, fileSplit.getOffset(0));
+    assertEquals(0, fileSplit.getLength(0));
+
+    fileSys.delete(file.getParent(), true);
+  }
 
   static class TestFilter implements PathFilter {
     private Path p;
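
For context only, not part of the patch above: a minimal, simplified sketch of the block-chunking loop that the CombineFileInputFormat.java hunks change, showing why replacing while (left > 0) with do { ... } while (left > 0) makes a zero-length file contribute one empty block (and hence one split) rather than none. The class and method names (EmptyFileSplitSketch, chunkOffsets) are illustrative, not Hadoop code, and the maxSize handling is simplified relative to the real method.

// Standalone sketch with assumed names and simplified maxSize logic; not Hadoop code.
import java.util.ArrayList;
import java.util.List;

public class EmptyFileSplitSketch {

  /**
   * Splits a byte range of the given length into chunks of at most maxSize
   * bytes (maxSize == 0 means "no limit"), always emitting at least one chunk.
   * The do-while mirrors the patched loop: even when length == 0 it records a
   * single zero-length chunk, whereas the old while (left > 0) loop recorded
   * nothing, so an empty file produced no block and no split.
   */
  static List<long[]> chunkOffsets(long offset, long length, long maxSize) {
    List<long[]> chunks = new ArrayList<>();
    long left = length;
    long myOffset = offset;
    do {
      long myLength = (maxSize == 0) ? left : Math.min(maxSize, left);
      chunks.add(new long[] { myOffset, myLength }); // {offset, length}
      left -= myLength;
      myOffset += myLength;
    } while (left > 0);
    return chunks;
  }

  public static void main(String[] args) {
    // Empty file: one zero-length chunk instead of an empty list.
    System.out.println(chunkOffsets(0, 0, 128).size());   // prints 1
    // Non-empty file: behavior unchanged, e.g. 300 bytes in chunks of <= 128.
    System.out.println(chunkOffsets(0, 300, 128).size()); // prints 3
  }
}

The regression test added by the patch (testForEmptyFile) checks the same behavior end to end through getSplits(): an empty input file still yields one CombineFileSplit containing one zero-length path.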