HADOOP-11706 Refine a little bit erasure coder API

This commit is contained in:
Kai Zheng 2015-03-18 19:21:37 +08:00 committed by Zhe Zhang
parent a38a37c634
commit c3bc083405
6 changed files with 31 additions and 29 deletions

View File

@ -17,6 +17,8 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.conf.Configurable;
/**
* An erasure coder to perform encoding or decoding given a group. Generally it
* involves calculating necessary internal steps according to codec logic. For
@ -31,7 +33,7 @@
* of multiple coding steps.
*
*/
public interface ErasureCoder {
public interface ErasureCoder extends Configurable {
/**
* Initialize with the important parameters for the code.

View File

@ -17,6 +17,8 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.conf.Configurable;
/**
* RawErasureCoder is a common interface for {@link RawErasureEncoder} and
* {@link RawErasureDecoder} as both encoder and decoder share some properties.
@ -31,7 +33,7 @@
* low level constructs, since it only takes care of the math calculation with
* a group of byte buffers.
*/
public interface RawErasureCoder {
public interface RawErasureCoder extends Configurable {
/**
* Initialize with the important parameters for the code.

View File

@ -17,11 +17,12 @@
*/
package org.apache.hadoop.io.erasurecode;
import org.apache.hadoop.conf.Configuration;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Random;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;
/**
@ -31,6 +32,7 @@
public abstract class TestCoderBase {
protected static Random RAND = new Random();
private Configuration conf;
protected int numDataUnits;
protected int numParityUnits;
protected int chunkSize = 16 * 1024;
@ -49,14 +51,23 @@ public abstract class TestCoderBase {
* @param numParityUnits
* @param erasedIndexes
*/
protected void prepare(int numDataUnits, int numParityUnits,
int[] erasedIndexes) {
protected void prepare(Configuration conf, int numDataUnits,
int numParityUnits, int[] erasedIndexes) {
this.conf = conf;
this.numDataUnits = numDataUnits;
this.numParityUnits = numParityUnits;
this.erasedDataIndexes = erasedIndexes != null ?
erasedIndexes : new int[] {0};
}
/**
 * Get the configuration supplied to {@code prepare} for this test.
 * @return configuration, possibly null if none was provided
 */
protected Configuration getConf() {
return this.conf;
}
/**
* Compare and verify if erased chunks are equal to recovered chunks
* @param erasedChunks

View File

@ -17,10 +17,9 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.TestCoderBase;
/**
@ -30,7 +29,6 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
protected Class<? extends ErasureEncoder> encoderClass;
protected Class<? extends ErasureDecoder> decoderClass;
private Configuration conf;
protected int numChunksInBlock = 16;
/**
@ -47,19 +45,6 @@ public TestBlock(ECChunk[] chunks) {
}
}
/**
* Prepare before running the case.
* @param conf
* @param numDataUnits
* @param numParityUnits
* @param erasedIndexes
*/
protected void prepare(Configuration conf, int numDataUnits,
int numParityUnits, int[] erasedIndexes) {
this.conf = conf;
super.prepare(numDataUnits, numParityUnits, erasedIndexes);
}
/**
* Generating source data, encoding, recovering and then verifying.
* RawErasureCoder mainly uses ECChunk to pass input and output data buffers,
@ -162,7 +147,7 @@ private ErasureEncoder createEncoder() {
}
encoder.initialize(numDataUnits, numParityUnits, chunkSize);
((AbstractErasureCoder)encoder).setConf(conf);
encoder.setConf(getConf());
return encoder;
}
@ -179,7 +164,7 @@ private ErasureDecoder createDecoder() {
}
decoder.initialize(numDataUnits, numParityUnits, chunkSize);
((AbstractErasureCoder)decoder).setConf(conf);
decoder.setConf(getConf());
return decoder;
}

View File

@ -46,37 +46,37 @@ public void setup() {
@Test
public void testCodingNoDirectBuffer_10x4() {
  // 10 data + 4 parity units, heap (non-direct) buffers, default erasure.
  // Stale call to the removed prepare(int, int, int[]) overload dropped.
  prepare(null, 10, 4, null);
  testCoding(false);
}
@Test
public void testCodingDirectBuffer_10x4() {
  // 10 data + 4 parity units, direct buffers, default erasure.
  // Stale call to the removed prepare(int, int, int[]) overload dropped.
  prepare(null, 10, 4, null);
  testCoding(true);
}
@Test
public void testCodingDirectBuffer_10x4_erasure_of_2_4() {
  // Direct buffers with units 2 and 4 erased.
  // Stale call to the removed prepare(int, int, int[]) overload dropped.
  prepare(null, 10, 4, new int[] {2, 4});
  testCoding(true);
}
@Test
public void testCodingDirectBuffer_10x4_erasing_all() {
  // Direct buffers, erasing units 0-3 (as many units as parity allows).
  // Stale call to the removed prepare(int, int, int[]) overload dropped.
  prepare(null, 10, 4, new int[] {0, 1, 2, 3});
  testCoding(true);
}
@Test
public void testCodingNoDirectBuffer_3x3() {
  // 3 data + 3 parity units, heap buffers, default erasure.
  // Stale call to the removed prepare(int, int, int[]) overload dropped.
  prepare(null, 3, 3, null);
  testCoding(false);
}
@Test
public void testCodingDirectBuffer_3x3() {
  // 3 data + 3 parity units, direct buffers, default erasure.
  // Stale call to the removed prepare(int, int, int[]) overload dropped.
  prepare(null, 3, 3, null);
  testCoding(true);
}

View File

@ -86,6 +86,7 @@ protected RawErasureEncoder createEncoder() {
}
encoder.initialize(numDataUnits, numParityUnits, chunkSize);
encoder.setConf(getConf());
return encoder;
}
@ -102,6 +103,7 @@ protected RawErasureDecoder createDecoder() {
}
decoder.initialize(numDataUnits, numParityUnits, chunkSize);
decoder.setConf(getConf());
return decoder;
}