MAPREDUCE-4974. Optimising the LineRecordReader initialize() method (Gelesh via bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1463221 13f79535-47bb-0310-9956-ffa450edef68
Robert Joseph Evans 2013-04-01 17:45:07 +00:00
parent aa634e0814
commit b55756dd03
2 changed files with 6 additions and 6 deletions

hadoop-mapreduce-project/CHANGES.txt

@@ -733,6 +733,9 @@ Release 0.23.7 - UNRELEASED
    MAPREDUCE-4822. Unnecessary conversions in History Events. (Chu Tong via
    jlowe)

    MAPREDUCE-4974. Optimising the LineRecordReader initialize() method
    (Gelesh via bobby)

  BUG FIXES

    MAPREDUCE-4458. Warn if java.library.path is used for AM or Task

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java

@@ -81,13 +81,13 @@ public class LineRecordReader extends RecordReader<LongWritable, Text> {
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();
    compressionCodecs = new CompressionCodecFactory(job);
    codec = compressionCodecs.getCodec(file);
    // open the file and seek to the start of the split
    final FileSystem fs = file.getFileSystem(job);
    fileIn = fs.open(file);
    if (isCompressedInput()) {
      compressionCodecs = new CompressionCodecFactory(job);
      codec = compressionCodecs.getCodec(file);
      decompressor = CodecPool.getDecompressor(codec);
      if (codec instanceof SplittableCompressionCodec) {
        final SplitCompressionInputStream cIn =
@@ -166,9 +166,6 @@ public class LineRecordReader extends RecordReader<LongWritable, Text> {
    while (getFilePosition() <= end) {
      newSize = in.readLine(value, maxLineLength,
          Math.max(maxBytesToConsume(pos), maxLineLength));
      if (newSize == 0) {
        break;
      }
      pos += newSize;
      if (newSize < maxLineLength) {
        break;
@@ -219,4 +216,4 @@ public class LineRecordReader extends RecordReader<LongWritable, Text> {
      }
    }
  }
}
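
For context on the second LineRecordReader.java hunk above: readLine() reports 0 bytes only once the end of the input is reached, and 0 is always smaller than a positive maxLineLength, so the existing "newSize < maxLineLength" test already ends the loop in that case; the separate "if (newSize == 0)" check was a redundant per-record branch. Below is a minimal, self-contained sketch of that loop shape in plain Java (not Hadoop code; the ReadLoopSketch class, its nextRecord() method, and the BufferedReader harness are illustrative stand-ins for the reader fields seen in the diff).

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

// Sketch only: the split-boundary check (getFilePosition() <= end) is omitted.
// A single "newSize < maxLineLength" test both accepts normal records and stops
// at end of input (newSize == 0), while over-long lines are skipped and re-read.
public class ReadLoopSketch {
  private final BufferedReader in;
  private final int maxLineLength;
  private String value;                  // stand-in for the Text value field

  ReadLoopSketch(BufferedReader in, int maxLineLength) {
    this.in = in;
    this.maxLineLength = maxLineLength;
  }

  /** Returns true if a record was read, false at end of input. */
  boolean nextRecord() throws IOException {
    int newSize;
    while (true) {
      String line = in.readLine();                        // null at end of input
      newSize = (line == null) ? 0 : line.length() + 1;   // +1: count the delimiter
      value = line;
      if (newSize < maxLineLength) {   // also true at end of input (newSize == 0)
        break;
      }
      // line too long: drop it and read the next one
    }
    return newSize != 0;               // 0 still means "no more records"
  }

  public static void main(String[] args) throws IOException {
    ReadLoopSketch reader = new ReadLoopSketch(
        new BufferedReader(new StringReader("alpha\nbeta\n")), 1024);
    while (reader.nextRecord()) {
      System.out.println(reader.value);
    }
  }
}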