MAPREDUCE-2779. JobSplitWriter.java can't handle large job.split file. Contributed by Ming Ma.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1177783 13f79535-47bb-0310-9956-ffa450edef68
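Why the change works: FSDataOutputStream extends java.io.DataOutputStream, so out.size() returns the inherited int byte counter, which the JDK clamps to Integer.MAX_VALUE once it overflows. Every split offset recorded past the 2 GB mark of job.split is therefore the same bogus value, while out.getPos(), Hadoop's long-valued stream position, keeps counting. A minimal sketch of the saturation, using a throwaway discard sink rather than the Hadoop classes:

// Sketch of the failure mode with a discard sink (not the Hadoop classes).
// DataOutputStream counts written bytes in an int and clamps the counter
// to Integer.MAX_VALUE on overflow, so size() stops moving near 2 GiB.
import java.io.DataOutputStream;
import java.io.OutputStream;

public class SizeSaturationDemo {
    public static void main(String[] args) throws Exception {
        DataOutputStream out = new DataOutputStream(new OutputStream() {
            @Override public void write(int b) { /* discard */ }
            @Override public void write(byte[] b, int off, int len) { /* discard */ }
        });
        byte[] chunk = new byte[1 << 20];            // 1 MiB
        long target = (1L << 31) + chunk.length;     // just past 2 GiB
        for (long written = 0; written < target; written += chunk.length) {
            out.write(chunk);
        }
        // Prints 2147483647: the int counter saturated even though more
        // than 2 GiB actually passed through the stream.
        System.out.println("out.size() = " + out.size());
        out.close();
    }
}

The diff below replaces every size() call with getPos() and widens the offset and length bookkeeping to long.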
parent b7b30d3fee
commit 1efe4e403d
@@ -2080,6 +2080,9 @@ Release 0.21.1 - Unreleased
     MAPREDUCE-2127. mapreduce trunk builds are filing on hudson.
     (Bruno Mahé via eli)
 
+    MAPREDUCE-2779. JobSplitWriter.java can't handle large job.split file.
+    (Ming Ma via shv)
+
 Release 0.21.0 - 2010-08-13
 
   INCOMPATIBLE CHANGES
@@ -114,15 +114,15 @@ public class JobSplitWriter {
     if (array.length != 0) {
       SerializationFactory factory = new SerializationFactory(conf);
       int i = 0;
-      long offset = out.size();
+      long offset = out.getPos();
       for(T split: array) {
-        int prevCount = out.size();
+        long prevCount = out.getPos();
         Text.writeString(out, split.getClass().getName());
         Serializer<T> serializer =
           factory.getSerializer((Class<T>) split.getClass());
         serializer.open(out);
         serializer.serialize(split);
-        int currCount = out.size();
+        long currCount = out.getPos();
         info[i++] =
           new JobSplit.SplitMetaInfo(
             split.getLocations(), offset,
@@ -139,12 +139,12 @@ public class JobSplitWriter {
     SplitMetaInfo[] info = new SplitMetaInfo[splits.length];
     if (splits.length != 0) {
       int i = 0;
-      long offset = out.size();
+      long offset = out.getPos();
       for(org.apache.hadoop.mapred.InputSplit split: splits) {
-        int prevLen = out.size();
+        long prevLen = out.getPos();
         Text.writeString(out, split.getClass().getName());
         split.write(out);
-        int currLen = out.size();
+        long currLen = out.getPos();
         info[i++] = new JobSplit.SplitMetaInfo(
           split.getLocations(), offset,
           split.getLength());
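The same pattern as a self-contained sketch: LongCountingOutputStream and its getPos() below are hypothetical stand-ins for FSDataOutputStream, not Hadoop API, but they show the shape of the fix, deriving each record's offset and length from a long position that cannot saturate:

// A self-contained sketch of the fixed pattern. LongCountingOutputStream
// is a hypothetical stand-in for FSDataOutputStream's long-valued
// position; per-record offsets and lengths are computed from a long,
// which cannot pin at 2 GiB the way DataOutputStream's int counter does.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;

class LongCountingOutputStream extends FilterOutputStream {
    private long pos;                      // long, not int

    LongCountingOutputStream(OutputStream out) { super(out); }

    long getPos() { return pos; }          // analogous to FSDataOutputStream.getPos()

    @Override public void write(int b) throws IOException {
        out.write(b);
        pos++;
    }

    @Override public void write(byte[] b, int off, int len) throws IOException {
        out.write(b, off, len);
        pos += len;
    }
}

public class SplitOffsetSketch {
    public static void main(String[] args) throws IOException {
        LongCountingOutputStream counting =
            new LongCountingOutputStream(new ByteArrayOutputStream());
        DataOutputStream out = new DataOutputStream(counting);
        for (String split : new String[] { "split-a", "split-b", "split-c" }) {
            long offset = counting.getPos();          // record start, as a long
            out.writeUTF(split);                      // serialize the record
            long length = counting.getPos() - offset; // record size, as a long
            System.out.println(split + " @ offset=" + offset + " length=" + length);
        }
        out.close();
    }
}

Running it prints each split's offset and length; with the int counter of the pre-patch code, those values would freeze at Integer.MAX_VALUE once the file crossed 2 GiB.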