MAPREDUCE-6554. MRAppMaster servicestart failing with NPE in MRAppMaster#parsePreviousJobHistory. Contributed by Bibin A Chundatt

This commit is contained in:
Jason Lowe 2016-01-15 16:59:44 +00:00
parent f7d3870034
commit 296ecbd5ec
2 changed files with 26 additions and 11 deletions

View File

@@ -28,6 +28,9 @@ Release 2.7.3 - UNRELEASED
MAPREDUCE-6583. Clarify confusing sentence in MapReduce tutorial document.
(Kai Sasaki via aajisaka)
MAPREDUCE-6554. MRAppMaster servicestart failing with NPE in
MRAppMaster#parsePreviousJobHistory (Bibin A Chundatt via jlowe)
Release 2.7.2 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -325,6 +328,9 @@ Release 2.6.4 - UNRELEASED
MAPREDUCE-6363. [NNBench] Lease mismatch error when running with multiple
mappers. (Vlad Sharanhovich and Bibin A Chundatt via aajisaka)
MAPREDUCE-6554. MRAppMaster servicestart failing with NPE in
MRAppMaster#parsePreviousJobHistory (Bibin A Chundatt via jlowe)
Release 2.6.3 - 2015-12-17
INCOMPATIBLE CHANGES

View File

@@ -20,9 +20,16 @@ package org.apache.hadoop.mapreduce.jobhistory;
import java.io.Closeable;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.EOFException;
import java.io.IOException;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
@@ -31,13 +38,6 @@ import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.util.StringInterner;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificDatumReader;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class EventReader implements Closeable {
@@ -72,9 +72,18 @@ public class EventReader implements Closeable {
}
Schema myschema = new SpecificData(Event.class.getClassLoader()).getSchema(Event.class);
this.schema = Schema.parse(in.readLine());
this.reader = new SpecificDatumReader(schema, myschema);
this.decoder = DecoderFactory.get().jsonDecoder(schema, in);
String eventschema = in.readLine();
if (null != eventschema) {
try {
this.schema = Schema.parse(eventschema);
this.reader = new SpecificDatumReader(schema, myschema);
this.decoder = DecoderFactory.get().jsonDecoder(schema, in);
} catch (AvroRuntimeException e) {
throw new IOException(e);
}
} else {
throw new IOException("Event schema string not parsed since its null");
}
}
/**