HDFS-8382 Remove chunkSize and initialize from erasure coder. Contributed by Kai Zheng

Kai Zheng 2015-05-25 16:13:29 +08:00 committed by Zhe Zhang
parent 0ed92e5b13
commit b30e96bfb4
31 changed files with 191 additions and 205 deletions
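In short, erasure coders no longer carry a chunkSize and are no longer initialized after construction: the data and parity unit counts are fixed in the constructor, and buffer sizing is left to the caller of each encode/decode call. A minimal before/after sketch of the raw coder API (variable names are illustrative, not taken from the patch):

    // Before this commit: two-phase setup
    RawErasureEncoder encoder = new RSRawEncoder();
    encoder.initialize(numDataUnits, numParityUnits, chunkSize);

    // After this commit: ready to use once constructed;
    // the caller sizes the input/output buffers itself
    RawErasureEncoder encoder = new RSRawEncoder(numDataUnits, numParityUnits);
    encoder.encode(dataBuffers, parityBuffers);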

View File

@@ -19,7 +19,6 @@ package org.apache.hadoop.io.erasurecode.codec;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.erasurecode.ECSchema;
-import org.apache.hadoop.io.erasurecode.coder.*;
 import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
 
 /**
@@ -28,10 +27,9 @@ import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
 public abstract class AbstractErasureCodec extends Configured
     implements ErasureCodec {
 
-  private ECSchema schema;
+  private final ECSchema schema;
 
-  @Override
-  public void setSchema(ECSchema schema) {
+  public AbstractErasureCodec(ECSchema schema) {
     this.schema = schema;
   }
@@ -39,7 +37,7 @@ public abstract class AbstractErasureCodec extends Configured
     return schema.getCodecName();
   }
 
-  protected ECSchema getSchema() {
+  public ECSchema getSchema() {
     return schema;
   }
@@ -50,39 +48,4 @@ public abstract class AbstractErasureCodec extends Configured
     return blockGrouper;
   }
-
-  @Override
-  public ErasureCoder createEncoder() {
-    ErasureCoder encoder = doCreateEncoder();
-    prepareErasureCoder(encoder);
-    return encoder;
-  }
-
-  /**
-   * Create a new encoder instance to be initialized afterwards.
-   * @return encoder
-   */
-  protected abstract ErasureCoder doCreateEncoder();
-
-  @Override
-  public ErasureCoder createDecoder() {
-    ErasureCoder decoder = doCreateDecoder();
-    prepareErasureCoder(decoder);
-    return decoder;
-  }
-
-  /**
-   * Create a new decoder instance to be initialized afterwards.
-   * @return decoder
-   */
-  protected abstract ErasureCoder doCreateDecoder();
-
-  private void prepareErasureCoder(ErasureCoder erasureCoder) {
-    if (getSchema() == null) {
-      throw new RuntimeException("No schema been set yet");
-    }
-
-    erasureCoder.setConf(getConf());
-    erasureCoder.initialize(getSchema());
-  }
 }

View File

@@ -18,7 +18,6 @@
 package org.apache.hadoop.io.erasurecode.codec;
 
 import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
 import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
@@ -29,12 +28,6 @@ import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
  */
 public interface ErasureCodec extends Configurable {
 
-  /**
-   * Set EC schema to be used by this codec.
-   * @param schema
-   */
-  public void setSchema(ECSchema schema);
-
   /**
    * Create block grouper
    * @return block grouper

View File

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.codec;
 
+import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
 import org.apache.hadoop.io.erasurecode.coder.RSErasureDecoder;
 import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;
@@ -26,13 +27,17 @@ import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;
  */
 public class RSErasureCodec extends AbstractErasureCodec {
 
-  @Override
-  protected ErasureCoder doCreateEncoder() {
-    return new RSErasureEncoder();
+  public RSErasureCodec(ECSchema schema) {
+    super(schema);
   }
 
   @Override
-  protected ErasureCoder doCreateDecoder() {
-    return new RSErasureDecoder();
+  public ErasureCoder createEncoder() {
+    return new RSErasureEncoder(getSchema());
+  }
+
+  @Override
+  public ErasureCoder createDecoder() {
+    return new RSErasureDecoder(getSchema());
   }
 }

View File

@@ -27,19 +27,18 @@ import org.apache.hadoop.io.erasurecode.coder.XORErasureEncoder;
  */
 public class XORErasureCodec extends AbstractErasureCodec {
 
-  @Override
-  public void setSchema(ECSchema schema) {
-    super.setSchema(schema);
+  public XORErasureCodec(ECSchema schema) {
+    super(schema);
     assert(schema.getNumParityUnits() == 1);
   }
 
   @Override
-  protected ErasureCoder doCreateEncoder() {
-    return new XORErasureEncoder();
+  public ErasureCoder createEncoder() {
+    return new XORErasureEncoder(getSchema());
   }
 
   @Override
-  protected ErasureCoder doCreateDecoder() {
-    return new XORErasureDecoder();
+  public ErasureCoder createDecoder() {
+    return new XORErasureDecoder(getSchema());
   }
 }

View File

@@ -33,18 +33,18 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
 public abstract class AbstractErasureCoder
     extends Configured implements ErasureCoder {
 
-  private int numDataUnits;
-  private int numParityUnits;
-  private int chunkSize;
+  private final int numDataUnits;
+  private final int numParityUnits;
 
   /**
    * Create raw decoder using the factory specified by rawCoderFactoryKey
    * @param rawCoderFactoryKey
    * @return raw decoder
    */
-  protected RawErasureDecoder createRawDecoder(String rawCoderFactoryKey) {
+  protected RawErasureDecoder createRawDecoder(
+      String rawCoderFactoryKey, int dataUnitsCount, int parityUnitsCount) {
     RawErasureCoder rawCoder = createRawCoder(getConf(),
-        rawCoderFactoryKey, false);
+        rawCoderFactoryKey, false, dataUnitsCount, parityUnitsCount);
     return (RawErasureDecoder) rawCoder;
   }
@@ -53,9 +53,10 @@ public abstract class AbstractErasureCoder
    * @param rawCoderFactoryKey
    * @return raw encoder
    */
-  protected RawErasureEncoder createRawEncoder(String rawCoderFactoryKey) {
+  protected RawErasureEncoder createRawEncoder(
+      String rawCoderFactoryKey, int dataUnitsCount, int parityUnitsCount) {
     RawErasureCoder rawCoder = createRawCoder(getConf(),
-        rawCoderFactoryKey, true);
+        rawCoderFactoryKey, true, dataUnitsCount, parityUnitsCount);
     return (RawErasureEncoder) rawCoder;
   }
@@ -67,7 +68,8 @@ public abstract class AbstractErasureCoder
    * @return raw coder
    */
   public static RawErasureCoder createRawCoder(Configuration conf,
-      String rawCoderFactoryKey, boolean isEncoder) {
+      String rawCoderFactoryKey, boolean isEncoder, int numDataUnits,
+      int numParityUnits) {
 
     if (conf == null) {
       return null;
@@ -90,21 +92,17 @@ public abstract class AbstractErasureCoder
       throw new RuntimeException("Failed to create raw coder", e);
     }
 
-    return isEncoder ? fact.createEncoder() : fact.createDecoder();
+    return isEncoder ? fact.createEncoder(numDataUnits, numParityUnits) :
+        fact.createDecoder(numDataUnits, numParityUnits);
   }
 
-  @Override
-  public void initialize(int numDataUnits, int numParityUnits,
-                         int chunkSize) {
+  public AbstractErasureCoder(int numDataUnits, int numParityUnits) {
     this.numDataUnits = numDataUnits;
     this.numParityUnits = numParityUnits;
-    this.chunkSize = chunkSize;
   }
 
-  @Override
-  public void initialize(ECSchema schema) {
-    initialize(schema.getNumDataUnits(), schema.getNumParityUnits(),
-        schema.getChunkSize());
+  public AbstractErasureCoder(ECSchema schema) {
+    this(schema.getNumDataUnits(), schema.getNumParityUnits());
   }
 
   @Override
@@ -118,12 +116,7 @@ public abstract class AbstractErasureCoder
   }
 
   @Override
-  public int getChunkSize() {
-    return chunkSize;
-  }
-
-  @Override
-  public boolean preferNativeBuffer() {
+  public boolean preferDirectBuffer() {
     return false;
   }

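The static createRawCoder path still lets deployments swap in a different raw coder implementation via configuration; the unit counts are simply threaded through to the factory now. A sketch under the new signature (the 6+3 Reed-Solomon layout is illustrative):

    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
        RSRawErasureCoderFactory.class.getCanonicalName());
    // true selects an encoder; 6 data units, 3 parity units
    RawErasureCoder coder = AbstractErasureCoder.createRawCoder(conf,
        CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
        true, 6, 3);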
View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ECSchema;
 
 /**
  * An abstract erasure decoder that's to be inherited by new decoders.
@@ -27,6 +28,14 @@ import org.apache.hadoop.io.erasurecode.ECBlockGroup;
  */
 public abstract class AbstractErasureDecoder extends AbstractErasureCoder {
 
+  public AbstractErasureDecoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
+  public AbstractErasureDecoder(ECSchema schema) {
+    super(schema);
+  }
+
   @Override
   public ErasureCodingStep calculateCoding(ECBlockGroup blockGroup) {
     // We may have more than this when considering complicate cases. HADOOP-11550

View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ECSchema;
 
 /**
  * An abstract erasure encoder that's to be inherited by new encoders.
@@ -27,6 +28,14 @@ import org.apache.hadoop.io.erasurecode.ECBlockGroup;
  */
 public abstract class AbstractErasureEncoder extends AbstractErasureCoder {
 
+  public AbstractErasureEncoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
+  public AbstractErasureEncoder(ECSchema schema) {
+    super(schema);
+  }
+
   @Override
   public ErasureCodingStep calculateCoding(ECBlockGroup blockGroup) {
     // We may have more than this when considering complicate cases. HADOOP-11550

View File

@@ -19,7 +19,6 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 
 /**
  * An erasure coder to perform encoding or decoding given a group. Generally it
@@ -37,20 +36,6 @@ import org.apache.hadoop.io.erasurecode.ECSchema;
  */
 public interface ErasureCoder extends Configurable {
 
-  /**
-   * Initialize with the important parameters for the code.
-   * @param numDataUnits how many data inputs for the coding
-   * @param numParityUnits how many parity outputs the coding generates
-   * @param chunkSize the size of the input/output buffer
-   */
-  public void initialize(int numDataUnits, int numParityUnits, int chunkSize);
-
-  /**
-   * Initialize with an EC schema.
-   * @param schema
-   */
-  public void initialize(ECSchema schema);
-
  /**
   * The number of data input units for the coding. A unit can be a byte,
   * chunk or buffer or even a block.
@@ -65,12 +50,6 @@ public interface ErasureCoder extends Configurable {
   */
  public int getNumParityUnits();

-  /**
-   * Chunk buffer size for the input/output
-   * @return chunk buffer size
-   */
-  public int getChunkSize();
-
  /**
   * Calculate the encoding or decoding steps given a block blockGroup.
   *
@@ -83,13 +62,13 @@ public interface ErasureCoder extends Configurable {
  public ErasureCodingStep calculateCoding(ECBlockGroup blockGroup);

  /**
-   * Tell if native or off-heap buffer is preferred or not. It's for callers to
+   * Tell if direct or off-heap buffer is preferred or not. It's for callers to
   * decide how to allocate coding chunk buffers, either on heap or off heap.
   * It will return false by default.
-   * @return true if native buffer is preferred for performance consideration,
+   * @return true if direct buffer is preferred for performance consideration,
   * otherwise false.
   */
-  public boolean preferNativeBuffer();
+  public boolean preferDirectBuffer();

  /**
   * Release the resources if any. Good chance to invoke RawErasureCoder#release.

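Since chunk size is gone from this interface, buffer sizing and placement are entirely the caller's job; preferDirectBuffer() (renamed from preferNativeBuffer above) is only a hint. A hedged sketch of how a caller might honor it; the allocateChunk helper and bufferSize parameter are hypothetical, not part of this patch:

    // Hypothetical helper: pick on-heap vs off-heap per the coder's hint.
    static ByteBuffer allocateChunk(ErasureCoder coder, int bufferSize) {
      return coder.preferDirectBuffer()
          ? ByteBuffer.allocateDirect(bufferSize)  // off-heap
          : ByteBuffer.allocate(bufferSize);       // on-heap
    }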
View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
@@ -31,6 +32,14 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 public class RSErasureDecoder extends AbstractErasureDecoder {
   private RawErasureDecoder rsRawDecoder;
 
+  public RSErasureDecoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
+  public RSErasureDecoder(ECSchema schema) {
+    super(schema);
+  }
+
   @Override
   protected ErasureCodingStep prepareDecodingStep(final ECBlockGroup blockGroup) {
@@ -45,12 +54,11 @@ public class RSErasureDecoder extends AbstractErasureDecoder {
   private RawErasureDecoder checkCreateRSRawDecoder() {
     if (rsRawDecoder == null) {
       rsRawDecoder = createRawDecoder(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY);
+          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
+          getNumDataUnits(), getNumParityUnits());
       if (rsRawDecoder == null) {
-        rsRawDecoder = new RSRawDecoder();
+        rsRawDecoder = new RSRawDecoder(getNumDataUnits(), getNumParityUnits());
       }
-      rsRawDecoder.initialize(getNumDataUnits(),
-          getNumParityUnits(), getChunkSize());
     }
     return rsRawDecoder;
   }

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@@ -31,6 +32,14 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
 public class RSErasureEncoder extends AbstractErasureEncoder {
   private RawErasureEncoder rawEncoder;
 
+  public RSErasureEncoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
+  public RSErasureEncoder(ECSchema schema) {
+    super(schema);
+  }
+
   @Override
   protected ErasureCodingStep prepareEncodingStep(final ECBlockGroup blockGroup) {
@@ -45,12 +54,11 @@ public class RSErasureEncoder extends AbstractErasureEncoder {
   private RawErasureEncoder checkCreateRSRawEncoder() {
     if (rawEncoder == null) {
       rawEncoder = createRawEncoder(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY);
+          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_RAWCODER_KEY,
+          getNumDataUnits(), getNumParityUnits());
       if (rawEncoder == null) {
-        rawEncoder = new RSRawEncoder();
+        rawEncoder = new RSRawEncoder(getNumDataUnits(), getNumParityUnits());
       }
-      rawEncoder.initialize(getNumDataUnits(),
-          getNumParityUnits(), getChunkSize());
     }
     return rawEncoder;
   }

View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.XORRawDecoder;
@@ -29,12 +30,19 @@ import org.apache.hadoop.io.erasurecode.rawcoder.XORRawDecoder;
  */
 public class XORErasureDecoder extends AbstractErasureDecoder {
 
+  public XORErasureDecoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
+  public XORErasureDecoder(ECSchema schema) {
+    super(schema);
+  }
+
   @Override
   protected ErasureCodingStep prepareDecodingStep(final ECBlockGroup blockGroup) {
     // May be configured
-    RawErasureDecoder rawDecoder = new XORRawDecoder();
-    rawDecoder.initialize(getNumDataUnits(),
-        getNumParityUnits(), getChunkSize());
+    RawErasureDecoder rawDecoder = new XORRawDecoder(
+        getNumDataUnits(), getNumParityUnits());
 
     ECBlock[] inputBlocks = getInputBlocks(blockGroup);

View File

@@ -19,22 +19,30 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.XORRawEncoder;
 
 /**
  * Xor erasure encoder that encodes a block group.
  *
- * It implements {@link ErasureEncoder}.
+ * It implements {@link ErasureCoder}.
  */
 public class XORErasureEncoder extends AbstractErasureEncoder {
 
+  public XORErasureEncoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
+  public XORErasureEncoder(ECSchema schema) {
+    super(schema);
+  }
+
   @Override
   protected ErasureCodingStep prepareEncodingStep(final ECBlockGroup blockGroup) {
     // May be configured
-    RawErasureEncoder rawEncoder = new XORRawEncoder();
-    rawEncoder.initialize(getNumDataUnits(),
-        getNumParityUnits(), getChunkSize());
+    RawErasureEncoder rawEncoder = new XORRawEncoder(
+        getNumDataUnits(), getNumParityUnits());
 
     ECBlock[] inputBlocks = getInputBlocks(blockGroup);

View File

@@ -31,16 +31,12 @@ import java.util.Arrays;
 public abstract class AbstractRawErasureCoder
     extends Configured implements RawErasureCoder {
 
-  private int numDataUnits;
-  private int numParityUnits;
-  private int chunkSize;
+  private final int numDataUnits;
+  private final int numParityUnits;
 
-  @Override
-  public void initialize(int numDataUnits, int numParityUnits,
-                         int chunkSize) {
+  public AbstractRawErasureCoder(int numDataUnits, int numParityUnits) {
     this.numDataUnits = numDataUnits;
     this.numParityUnits = numParityUnits;
-    this.chunkSize = chunkSize;
   }
 
   @Override
@@ -53,11 +49,6 @@ public abstract class AbstractRawErasureCoder
     return numParityUnits;
   }
 
-  @Override
-  public int getChunkSize() {
-    return chunkSize;
-  }
-
   @Override
   public boolean preferDirectBuffer() {
     return false;

View File

@@ -30,6 +30,10 @@ import java.nio.ByteBuffer;
 public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
     implements RawErasureDecoder {
 
+  public AbstractRawErasureDecoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
   @Override
   public void decode(ByteBuffer[] inputs, int[] erasedIndexes,
                      ByteBuffer[] outputs) {

View File

@@ -30,6 +30,10 @@ import java.nio.ByteBuffer;
 public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
     implements RawErasureEncoder {
 
+  public AbstractRawErasureEncoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
   @Override
   public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
     checkParameters(inputs, outputs);

View File

@@ -31,9 +31,8 @@ public class RSRawDecoder extends AbstractRawErasureDecoder {
   private int[] errSignature;
   private int[] primitivePower;
 
-  @Override
-  public void initialize(int numDataUnits, int numParityUnits, int chunkSize) {
-    super.initialize(numDataUnits, numParityUnits, chunkSize);
+  public RSRawDecoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
     assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
 
     this.errSignature = new int[numParityUnits];

View File

@@ -29,9 +29,9 @@ import java.nio.ByteBuffer;
 public class RSRawEncoder extends AbstractRawErasureEncoder {
   private int[] generatingPolynomial;
 
-  @Override
-  public void initialize(int numDataUnits, int numParityUnits, int chunkSize) {
-    super.initialize(numDataUnits, numParityUnits, chunkSize);
+  public RSRawEncoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
     assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
 
     int[] primitivePower = RSUtil.getPrimitivePower(numDataUnits,

View File

@@ -23,12 +23,12 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 public class RSRawErasureCoderFactory implements RawErasureCoderFactory {
 
   @Override
-  public RawErasureEncoder createEncoder() {
-    return new RSRawEncoder();
+  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits) {
+    return new RSRawEncoder(numDataUnits, numParityUnits);
   }
 
   @Override
-  public RawErasureDecoder createDecoder() {
-    return new RSRawDecoder();
+  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits) {
+    return new RSRawDecoder(numDataUnits, numParityUnits);
   }
 }

View File

@@ -35,14 +35,6 @@ import org.apache.hadoop.conf.Configurable;
  */
 public interface RawErasureCoder extends Configurable {
 
-  /**
-   * Initialize with the important parameters for the code.
-   * @param numDataUnits how many data inputs for the coding
-   * @param numParityUnits how many parity outputs the coding generates
-   * @param chunkSize the size of the input/output buffer
-   */
-  public void initialize(int numDataUnits, int numParityUnits, int chunkSize);
-
   /**
    * The number of data input units for the coding. A unit can be a byte,
    * chunk or buffer or even a block.
@@ -57,12 +49,6 @@ public interface RawErasureCoder extends Configurable {
    */
   public int getNumParityUnits();
 
-  /**
-   * Chunk buffer size for the input/output
-   * @return chunk buffer size
-   */
-  public int getChunkSize();
-
   /**
    * Tell if direct buffer is preferred or not. It's for callers to
    * decide how to allocate coding chunk buffers, using DirectByteBuffer or

View File

@@ -26,13 +26,17 @@ public interface RawErasureCoderFactory {
 
   /**
    * Create raw erasure encoder.
+   * @param numDataUnits
+   * @param numParityUnits
    * @return raw erasure encoder
    */
-  public RawErasureEncoder createEncoder();
+  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits);
 
   /**
    * Create raw erasure decoder.
+   * @param numDataUnits
+   * @param numParityUnits
    * @return raw erasure decoder
    */
-  public RawErasureDecoder createDecoder();
+  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits);
 }

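With the factory methods now taking the unit counts, a configured factory hands back coders that are ready to use. A small usage sketch (the 6+3 Reed-Solomon layout is illustrative, not mandated by the patch):

    RawErasureCoderFactory factory = new RSRawErasureCoderFactory();
    // e.g. six data units protected by three parity units
    RawErasureEncoder encoder = factory.createEncoder(6, 3);
    RawErasureDecoder decoder = factory.createDecoder(6, 3);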
View File

@@ -28,6 +28,10 @@ import java.nio.ByteBuffer;
  */
 public class XORRawDecoder extends AbstractRawErasureDecoder {
 
+  public XORRawDecoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
   @Override
   protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
                           ByteBuffer[] outputs) {

View File

@@ -28,6 +28,10 @@ import java.nio.ByteBuffer;
  */
 public class XORRawEncoder extends AbstractRawErasureEncoder {
 
+  public XORRawEncoder(int numDataUnits, int numParityUnits) {
+    super(numDataUnits, numParityUnits);
+  }
+
   protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
     ByteBuffer output = outputs[0];
     resetOutputBuffer(output);

View File

@@ -23,12 +23,12 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 public class XORRawErasureCoderFactory implements RawErasureCoderFactory {
 
   @Override
-  public RawErasureEncoder createEncoder() {
-    return new XORRawEncoder();
+  public RawErasureEncoder createEncoder(int numDataUnits, int numParityUnits) {
+    return new XORRawEncoder(numDataUnits, numParityUnits);
   }
 
   @Override
-  public RawErasureDecoder createDecoder() {
-    return new XORRawDecoder();
+  public RawErasureDecoder createDecoder(int numDataUnits, int numParityUnits) {
+    return new XORRawDecoder(numDataUnits, numParityUnits);
   }
 }

View File

@@ -22,6 +22,8 @@ import org.apache.hadoop.io.erasurecode.ECBlockGroup;
 import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.TestCoderBase;
 
+import java.lang.reflect.Constructor;
+
 /**
  * Erasure coder test base with utilities.
  */
@@ -139,23 +141,6 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
     }
   }
 
-  /**
-   * Create erasure encoder for test.
-   * @return
-   */
-  private ErasureCoder createEncoder() {
-    ErasureCoder encoder;
-    try {
-      encoder = encoderClass.newInstance();
-    } catch (Exception e) {
-      throw new RuntimeException("Failed to create encoder", e);
-    }
-
-    encoder.initialize(numDataUnits, numParityUnits, getChunkSize());
-    encoder.setConf(getConf());
-    return encoder;
-  }
-
   private void prepareCoders() {
     if (encoder == null) {
       encoder = createEncoder();
@@ -167,18 +152,39 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
   }
 
   /**
-   * Create the erasure decoder for the test.
+   * Create the raw erasure encoder to test
    * @return
    */
-  private ErasureCoder createDecoder() {
+  protected ErasureCoder createEncoder() {
+    ErasureCoder encoder;
+    try {
+      Constructor<? extends ErasureCoder> constructor =
+          (Constructor<? extends ErasureCoder>)
+              encoderClass.getConstructor(int.class, int.class);
+      encoder = constructor.newInstance(numDataUnits, numParityUnits);
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to create encoder", e);
+    }
+
+    encoder.setConf(getConf());
+    return encoder;
+  }
+
+  /**
+   * create the raw erasure decoder to test
+   * @return
+   */
+  protected ErasureCoder createDecoder() {
     ErasureCoder decoder;
     try {
-      decoder = decoderClass.newInstance();
+      Constructor<? extends ErasureCoder> constructor =
+          (Constructor<? extends ErasureCoder>)
+              decoderClass.getConstructor(int.class, int.class);
+      decoder = constructor.newInstance(numDataUnits, numParityUnits);
     } catch (Exception e) {
      throw new RuntimeException("Failed to create decoder", e);
    }
 
-    decoder.initialize(numDataUnits, numParityUnits, getChunkSize());
     decoder.setConf(getConf());
     return decoder;
   }

View File

@@ -21,6 +21,8 @@ import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.TestCoderBase;
 import org.junit.Assert;
 
+import java.lang.reflect.Constructor;
+
 /**
  * Raw coder test base with utilities.
  */
@@ -136,12 +138,14 @@ public abstract class TestRawCoderBase extends TestCoderBase {
   protected RawErasureEncoder createEncoder() {
     RawErasureEncoder encoder;
     try {
-      encoder = encoderClass.newInstance();
+      Constructor<? extends RawErasureEncoder> constructor =
+          (Constructor<? extends RawErasureEncoder>)
+              encoderClass.getConstructor(int.class, int.class);
+      encoder = constructor.newInstance(numDataUnits, numParityUnits);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create encoder", e);
     }
 
-    encoder.initialize(numDataUnits, numParityUnits, getChunkSize());
     encoder.setConf(getConf());
     return encoder;
   }
@@ -153,14 +157,15 @@ public abstract class TestRawCoderBase extends TestCoderBase {
   protected RawErasureDecoder createDecoder() {
     RawErasureDecoder decoder;
     try {
-      decoder = decoderClass.newInstance();
+      Constructor<? extends RawErasureDecoder> constructor =
+          (Constructor<? extends RawErasureDecoder>)
+              decoderClass.getConstructor(int.class, int.class);
+      decoder = constructor.newInstance(numDataUnits, numParityUnits);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create decoder", e);
     }
 
-    decoder.initialize(numDataUnits, numParityUnits, getChunkSize());
     decoder.setConf(getConf());
     return decoder;
   }
 }

View File

@@ -255,3 +255,5 @@
       (waltersu4549)
 
     HDFS-7768. Change fsck to support EC files. (Takanobu Asanuma via szetszwo)
+
+    HDFS-8382. Remove chunkSize and initialize from erasure coder. (Kai Zheng)

View File

@@ -247,8 +247,7 @@ public class DFSStripedOutputStream extends DFSOutputStream {
     numDataBlocks = schema.getNumDataUnits();
     numAllBlocks = numDataBlocks + numParityBlocks;
 
-    encoder = new RSRawEncoder();
-    encoder.initialize(numDataBlocks, numParityBlocks, cellSize);
+    encoder = new RSRawEncoder(numDataBlocks, numParityBlocks);
 
     coordinator = new Coordinator(dfsClient.getConf(), numDataBlocks, numAllBlocks);
     try {

View File

@@ -111,12 +111,12 @@ public final class ErasureCodingWorker {
         DFSConfigKeys.DFS_DATANODE_STRIPED_READ_BUFFER_SIZE_DEFAULT);
   }
 
-  private RawErasureEncoder newEncoder() {
-    return new RSRawEncoder();
+  private RawErasureEncoder newEncoder(int numDataUnits, int numParityUnits) {
+    return new RSRawEncoder(numDataUnits, numParityUnits);
   }
 
-  private RawErasureDecoder newDecoder() {
-    return new RSRawDecoder();
+  private RawErasureDecoder newDecoder(int numDataUnits, int numParityUnits) {
+    return new RSRawDecoder(numDataUnits, numParityUnits);
   }
 
   private void initializeStripedReadThreadPool(int num) {
@@ -517,16 +517,14 @@ public final class ErasureCodingWorker {
     // Initialize encoder
     private void initEncoderIfNecessary() {
       if (encoder == null) {
-        encoder = newEncoder();
-        encoder.initialize(dataBlkNum, parityBlkNum, bufferSize);
+        encoder = newEncoder(dataBlkNum, parityBlkNum);
       }
     }
 
     // Initialize decoder
     private void initDecoderIfNecessary() {
       if (decoder == null) {
-        decoder = newDecoder();
-        decoder.initialize(dataBlkNum, parityBlkNum, bufferSize);
+        decoder = newDecoder(dataBlkNum, parityBlkNum);
       }
     }

View File

@@ -287,8 +287,7 @@ public class StripedBlockUtil {
     }
     byte[][] outputs = new byte[parityBlkNum][(int) alignedStripe.getSpanInBlock()];
 
-    RSRawDecoder rsRawDecoder = new RSRawDecoder();
-    rsRawDecoder.initialize(dataBlkNum, parityBlkNum, (int) alignedStripe.getSpanInBlock());
+    RSRawDecoder rsRawDecoder = new RSRawDecoder(dataBlkNum, parityBlkNum);
     rsRawDecoder.decode(decodeInputs, decodeIndices, outputs);
 
     for (int i = 0; i < dataBlkNum + parityBlkNum; i++) {

View File

@@ -274,8 +274,8 @@ public class TestDFSStripedOutputStream {
         System.arraycopy(tmp, 0, dataBytes[i], 0, tmp.length);
       }
     }
-    final RawErasureEncoder encoder = new RSRawEncoder();
-    encoder.initialize(dataBytes.length, parityBytes.length, cellSize);
+    final RawErasureEncoder encoder =
+        new RSRawEncoder(dataBytes.length, parityBytes.length);
     encoder.encode(dataBytes, expectedParityBytes);
     for (int i = 0; i < parityBytes.length; i++) {
       if (i != killedDnIndex) {

View File

@@ -382,8 +382,7 @@ public class TestWriteReadStripedFile {
     Assert.assertEquals("The length of file should be the same to write size",
         length - startOffsetInFile, readLen);
 
-    RSRawDecoder rsRawDecoder = new RSRawDecoder();
-    rsRawDecoder.initialize(dataBlocks, parityBlocks, 1);
+    RSRawDecoder rsRawDecoder = new RSRawDecoder(dataBlocks, parityBlocks);
     byte[] expected = new byte[readLen];
     for (int i = startOffsetInFile; i < length; i++) {
       //TODO: workaround (filling fixed bytes), to remove after HADOOP-11938