HADOOP-11706. Slightly refine the erasure coder API.

Kai Zheng 2015-03-18 19:21:37 +08:00 committed by Zhe Zhang
parent a38a37c634
commit c3bc083405
6 changed files with 31 additions and 29 deletions

org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;
 
+import org.apache.hadoop.conf.Configurable;
+
 /**
  * An erasure coder to perform encoding or decoding given a group. Generally it
  * involves calculating necessary internal steps according to codec logic. For
@@ -31,7 +33,7 @@
  * of multiple coding steps.
  *
  */
-public interface ErasureCoder {
+public interface ErasureCoder extends Configurable {
 
   /**
    * Initialize with the important parameters for the code.

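With ErasureCoder extending Configurable, a caller can hand any coder a Hadoop Configuration through the standard setConf/getConf contract, with no cast to a concrete base class. A minimal usage sketch follows; RSErasureEncoder and its no-arg constructor are assumed from the surrounding branch, and only setConf and initialize(numDataUnits, numParityUnits, chunkSize) are guaranteed by this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;

public class CoderConfigDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // The interface now carries the Configurable contract, so the coder
    // can be configured without knowing its concrete type.
    ErasureCoder encoder = new RSErasureEncoder(); // assumed concrete class
    encoder.setConf(conf);
    encoder.initialize(10, 4, 16 * 1024); // 10 data units, 4 parity units, 16KB chunks
  }
}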
org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;
 
+import org.apache.hadoop.conf.Configurable;
+
 /**
  * RawErasureCoder is a common interface for {@link RawErasureEncoder} and
  * {@link RawErasureDecoder} as both encoder and decoder share some properties.
@@ -31,7 +33,7 @@
  * low level constructs, since it only takes care of the math calculation with
  * a group of byte buffers.
  */
-public interface RawErasureCoder {
+public interface RawErasureCoder extends Configurable {
 
   /**
    * Initialize with the important parameters for the code.

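RawErasureCoder gets the same treatment, so raw encoders and decoders share one configuration path. A sketch of the kind of generic helper this enables; the helper itself is illustrative and not part of the commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoder;

public final class RawCoderConfigurator {
  private RawCoderConfigurator() {}

  // Works for RawErasureEncoder and RawErasureDecoder alike, since both
  // inherit the Configurable contract from the common RawErasureCoder.
  public static <T extends RawErasureCoder> T withConf(T coder, Configuration conf) {
    coder.setConf(conf);
    return coder;
  }
}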
org/apache/hadoop/io/erasurecode/TestCoderBase.java

@@ -17,11 +17,12 @@
  */
 package org.apache.hadoop.io.erasurecode;
 
+import org.apache.hadoop.conf.Configuration;
+
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Random;
 
-import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;
 
 /**
@@ -31,6 +32,7 @@
 public abstract class TestCoderBase {
   protected static Random RAND = new Random();
 
+  private Configuration conf;
   protected int numDataUnits;
   protected int numParityUnits;
   protected int chunkSize = 16 * 1024;
@@ -49,14 +51,23 @@ public abstract class TestCoderBase {
    * @param numParityUnits
    * @param erasedIndexes
    */
-  protected void prepare(int numDataUnits, int numParityUnits,
-                         int[] erasedIndexes) {
+  protected void prepare(Configuration conf, int numDataUnits,
+                         int numParityUnits, int[] erasedIndexes) {
+    this.conf = conf;
     this.numDataUnits = numDataUnits;
     this.numParityUnits = numParityUnits;
     this.erasedDataIndexes = erasedIndexes != null ?
         erasedIndexes : new int[] {0};
   }
 
+  /**
+   * Get the conf the test uses.
+   * @return configuration
+   */
+  protected Configuration getConf() {
+    return this.conf;
+  }
+
   /**
    * Compare and verify if erased chunks are equal to recovered chunks
    * @param erasedChunks

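Test subclasses now hand a Configuration to prepare(), and the base class exposes it through getConf() when coders are created; the RS coder tests further below simply pass null where no specific configuration is needed. A hypothetical subclass illustrating the new contract (the class name, test name, and body are made up for illustration):

import org.apache.hadoop.conf.Configuration;
import org.junit.Test;

public class TestSomeCoder extends TestCoderBase {
  @Test
  public void testCodingWithExplicitConf() {
    Configuration conf = new Configuration();
    // conf now travels with the fixture and reaches the coders via getConf().
    prepare(conf, 10, 4, new int[] {0});
    // ... exercise encoding and decoding as the concrete test base defines.
  }
}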
org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java

@@ -17,10 +17,9 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.ECBlock;
-import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.TestCoderBase;
 
 /**
@@ -30,7 +29,6 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
   protected Class<? extends ErasureEncoder> encoderClass;
   protected Class<? extends ErasureDecoder> decoderClass;
 
-  private Configuration conf;
   protected int numChunksInBlock = 16;
 
   /**
@@ -47,19 +45,6 @@ public TestBlock(ECChunk[] chunks) {
     }
   }
 
-  /**
-   * Prepare before running the case.
-   * @param conf
-   * @param numDataUnits
-   * @param numParityUnits
-   * @param erasedIndexes
-   */
-  protected void prepare(Configuration conf, int numDataUnits,
-      int numParityUnits, int[] erasedIndexes) {
-    this.conf = conf;
-    super.prepare(numDataUnits, numParityUnits, erasedIndexes);
-  }
-
   /**
    * Generating source data, encoding, recovering and then verifying.
    * RawErasureCoder mainly uses ECChunk to pass input and output data buffers,
@@ -162,7 +147,7 @@ private ErasureEncoder createEncoder() {
     }
 
     encoder.initialize(numDataUnits, numParityUnits, chunkSize);
-    ((AbstractErasureCoder)encoder).setConf(conf);
+    encoder.setConf(getConf());
     return encoder;
   }
 
@@ -179,7 +164,7 @@ private ErasureDecoder createDecoder() {
     }
 
     decoder.initialize(numDataUnits, numParityUnits, chunkSize);
-    ((AbstractErasureCoder)decoder).setConf(conf);
+    decoder.setConf(getConf());
     return decoder;
   }

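The net effect in TestErasureCoderBase: the duplicated prepare() override disappears into the base class, and cast-based configuration becomes a call on the interface type. The contrast, taken directly from the hunks above:

// Before: only AbstractErasureCoder subclasses could be configured.
((AbstractErasureCoder) encoder).setConf(conf);

// After: any ErasureCoder implementation accepts a Configuration.
encoder.setConf(getConf());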
org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java

@@ -46,37 +46,37 @@ public void setup() {
 
   @Test
   public void testCodingNoDirectBuffer_10x4() {
-    prepare(10, 4, null);
+    prepare(null, 10, 4, null);
     testCoding(false);
   }
 
   @Test
   public void testCodingDirectBuffer_10x4() {
-    prepare(10, 4, null);
+    prepare(null, 10, 4, null);
     testCoding(true);
   }
 
   @Test
   public void testCodingDirectBuffer_10x4_erasure_of_2_4() {
-    prepare(10, 4, new int[] {2, 4});
+    prepare(null, 10, 4, new int[] {2, 4});
     testCoding(true);
   }
 
   @Test
   public void testCodingDirectBuffer_10x4_erasing_all() {
-    prepare(10, 4, new int[] {0, 1, 2, 3});
+    prepare(null, 10, 4, new int[] {0, 1, 2, 3});
     testCoding(true);
   }
 
   @Test
   public void testCodingNoDirectBuffer_3x3() {
-    prepare(3, 3, null);
+    prepare(null, 3, 3, null);
     testCoding(false);
   }
 
   @Test
   public void testCodingDirectBuffer_3x3() {
-    prepare(3, 3, null);
+    prepare(null, 3, 3, null);
     testCoding(true);
   }

org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java

@@ -86,6 +86,7 @@ protected RawErasureEncoder createEncoder() {
     }
 
     encoder.initialize(numDataUnits, numParityUnits, chunkSize);
+    encoder.setConf(getConf());
     return encoder;
   }
 
@@ -102,6 +103,7 @@ protected RawErasureDecoder createDecoder() {
     }
 
     decoder.initialize(numDataUnits, numParityUnits, chunkSize);
+    decoder.setConf(getConf());
     return decoder;
   }