svn merge -c 1463221 FIXES: MAPREDUCE-4974. Optimising the LineRecordReader initialize() method (Gelesh via bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1463222 13f79535-47bb-0310-9956-ffa450edef68
Robert Joseph Evans 2013-04-01 17:47:19 +00:00
parent db7b5e12d0
commit f47bdbded7
2 changed files with 6 additions and 6 deletions


@@ -675,6 +675,9 @@ Release 0.23.7 - UNRELEASED
    MAPREDUCE-4822. Unnecessary conversions in History Events. (Chu Tong via
    jlowe)

    MAPREDUCE-4974. Optimising the LineRecordReader initialize() method
    (Gelesh via bobby)

  BUG FIXES

    MAPREDUCE-4458. Warn if java.library.path is used for AM or Task


@@ -81,13 +81,13 @@ public class LineRecordReader extends RecordReader<LongWritable, Text> {
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();
    compressionCodecs = new CompressionCodecFactory(job);
    codec = compressionCodecs.getCodec(file);

    // open the file and seek to the start of the split
    final FileSystem fs = file.getFileSystem(job);
    fileIn = fs.open(file);
    if (isCompressedInput()) {
      compressionCodecs = new CompressionCodecFactory(job);
      codec = compressionCodecs.getCodec(file);
      decompressor = CodecPool.getDecompressor(codec);
      if (codec instanceof SplittableCompressionCodec) {
        final SplitCompressionInputStream cIn =
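
Read together with the hunk counts (13 lines before and after), the lines above show the CompressionCodecFactory and codec lookup at both its old position, before the file is opened, and its new position next to the compressed-input branch. The following is a minimal sketch of that reorganized flow, not verbatim from the patch; the guard is written as an explicit null check, which is what isCompressedInput() amounts to once the codec has been resolved.

    // Sketch only: reorganized initialize() excerpt; exact placement and the
    // guard in the committed patch may differ from what is shown here.
    final FileSystem fs = file.getFileSystem(job);
    fileIn = fs.open(file);

    // Resolve the codec right where the compressed-input branch needs it,
    // instead of before the file is opened.
    compressionCodecs = new CompressionCodecFactory(job);
    codec = compressionCodecs.getCodec(file);
    if (codec != null) {   // equivalent to isCompressedInput()
      decompressor = CodecPool.getDecompressor(codec);
      // splittable vs. non-splittable codec handling continues as in the hunk
    } else {
      fileIn.seek(start);  // uncompressed split: seek straight to the split start
    }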
@@ -166,9 +166,6 @@ public class LineRecordReader extends RecordReader<LongWritable, Text> {
    while (getFilePosition() <= end) {
      newSize = in.readLine(value, maxLineLength,
          Math.max(maxBytesToConsume(pos), maxLineLength));
      if (newSize == 0) {
        break;
      }
      pos += newSize;
      if (newSize < maxLineLength) {
        break;
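
Per its header, this second hunk shrinks from nine lines to six with nothing added, which is consistent with the standalone zero-length check being dropped: when readLine() returns 0 at the end of the split, pos += 0 is a no-op and the newSize < maxLineLength test already exits the loop for any positive maxLineLength, so the extra branch is redundant. A sketch of the resulting loop under that assumption:

    // Assumed post-patch form of the read loop (inferred from the hunk
    // counts, not copied verbatim from the commit).
    while (getFilePosition() <= end) {
      newSize = in.readLine(value, maxLineLength,
          Math.max(maxBytesToConsume(pos), maxLineLength));
      pos += newSize;                   // adding 0 at end of split is harmless
      if (newSize < maxLineLength) {    // also true when newSize == 0
        break;
      }
      // over-long line: the rest of the method logs it and reads again (unchanged)
    }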