HADOOP-11705. Make erasure coder configurable. Contributed by Kai Zheng

This commit is contained in:
drankye 2015-03-12 23:35:22 +08:00 committed by Zhe Zhang
parent f05c21285e
commit 292e367d07
6 changed files with 60 additions and 9 deletions

View File

@ -18,3 +18,7 @@
HADOOP-11646. Erasure Coder API for encoding and decoding of block group
( Kai Zheng via vinayakumarb )
HADOOP-11705. Make erasure coder configurable.
( Kai Zheng )

View File

@ -17,12 +17,15 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.conf.Configured;
/**
* A common class of basic facilities to be shared by encoder and decoder.
*
* It implements the {@link ErasureCoder} interface.
*/
public abstract class AbstractErasureCoder implements ErasureCoder {
public abstract class AbstractErasureCoder
extends Configured implements ErasureCoder {
private int numDataUnits;
private int numParityUnits;

View File

@ -17,12 +17,15 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.conf.Configured;
/**
* A common class of basic facilities to be shared by encoder and decoder.
*
* It implements the {@link RawErasureCoder} interface.
*/
public abstract class AbstractRawErasureCoder implements RawErasureCoder {
public abstract class AbstractRawErasureCoder
extends Configured implements RawErasureCoder {
private int numDataUnits;
private int numParityUnits;

View File

@ -43,6 +43,12 @@ public abstract class TestCoderBase {
// may go to different coding implementations.
protected boolean usingDirectBuffer = true;
/**
 * Prepare before running the case.
 * @param numDataUnits number of data units in the coding group
 * @param numParityUnits number of parity units in the coding group
 * @param erasedIndexes indexes of the units to be erased and later recovered
 */
protected void prepare(int numDataUnits, int numParityUnits,
int[] erasedIndexes) {
this.numDataUnits = numDataUnits;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@ -29,6 +30,7 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
protected Class<? extends ErasureEncoder> encoderClass;
protected Class<? extends ErasureDecoder> decoderClass;
private Configuration conf;
protected int numChunksInBlock = 16;
/**
@ -45,6 +47,19 @@ public TestBlock(ECChunk[] chunks) {
}
}
/**
 * Prepare before running the case: record the configuration to hand to the
 * coder under test, then delegate the coding parameters to the superclass.
 * @param conf the configuration passed to created encoders/decoders
 * @param numDataUnits number of data units in the coding group
 * @param numParityUnits number of parity units in the coding group
 * @param erasedIndexes indexes of the units to be erased and later recovered
 */
protected void prepare(Configuration conf, int numDataUnits,
int numParityUnits, int[] erasedIndexes) {
this.conf = conf;
super.prepare(numDataUnits, numParityUnits, erasedIndexes);
}
/**
* Generating source data, encoding, recovering and then verifying.
* RawErasureCoder mainly uses ECChunk to pass input and output data buffers,
@ -56,6 +71,7 @@ protected void testCoding(boolean usingDirectBuffer) {
this.usingDirectBuffer = usingDirectBuffer;
ErasureEncoder encoder = createEncoder();
// Generate data and encode
ECBlockGroup blockGroup = prepareBlockGroupForEncoding();
// Backup all the source chunks for later recovering because some coders
@ -65,17 +81,25 @@ protected void testCoding(boolean usingDirectBuffer) {
// Make a copy of a strip for later comparing
TestBlock[] toEraseBlocks = copyDataBlocksToErase(clonedDataBlocks);
ErasureCodingStep codingStep = encoder.encode(blockGroup);
performCodingStep(codingStep);
ErasureCodingStep codingStep;
try {
codingStep = encoder.encode(blockGroup);
performCodingStep(codingStep);
} finally {
encoder.release();
}
// Erase the copied sources
eraseSomeDataBlocks(clonedDataBlocks);
//Decode
blockGroup = new ECBlockGroup(clonedDataBlocks, blockGroup.getParityBlocks());
ErasureDecoder decoder = createDecoder();
codingStep = decoder.decode(blockGroup);
performCodingStep(codingStep);
try {
codingStep = decoder.decode(blockGroup);
performCodingStep(codingStep);
} finally {
decoder.release();
}
//Compare
compareAndVerify(toEraseBlocks, codingStep.getOutputBlocks());
}
@ -138,6 +162,7 @@ private ErasureEncoder createEncoder() {
}
encoder.initialize(numDataUnits, numParityUnits, chunkSize);
encoder.setConf(conf);
return encoder;
}
@ -154,6 +179,7 @@ private ErasureDecoder createDecoder() {
}
decoder.initialize(numDataUnits, numParityUnits, chunkSize);
decoder.setConf(conf);
return decoder;
}

View File

@ -49,7 +49,11 @@ protected void testCoding(boolean usingDirectBuffer) {
// Make a copy of a strip for later comparing
ECChunk[] toEraseDataChunks = copyDataChunksToErase(clonedDataChunks);
encoder.encode(dataChunks, parityChunks);
try {
encoder.encode(dataChunks, parityChunks);
} finally {
encoder.release();
}
// Erase the copied sources
eraseSomeDataBlocks(clonedDataChunks);
@ -58,7 +62,12 @@ protected void testCoding(boolean usingDirectBuffer) {
parityChunks);
ECChunk[] recoveredChunks = prepareOutputChunksForDecoding();
RawErasureDecoder decoder = createDecoder();
decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
try {
decoder.decode(inputChunks,
getErasedIndexesForDecoding(), recoveredChunks);
} finally {
decoder.release();
}
//Compare
compareAndVerify(toEraseDataChunks, recoveredChunks);