Java class org.apache.hadoop.io.erasurecode.ECBlock: example source code
Project: hadoop-oss
File: HHXORErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  RawErasureDecoder rawDecoder;
  RawErasureEncoder rawEncoder;
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  ECBlock[] outputBlocks = getOutputBlocks(blockGroup);
  rawDecoder = checkCreateRSRawDecoder();
  rawEncoder = checkCreateXorRawEncoder();
  return new HHXORErasureDecodingStep(inputBlocks,
      getErasedIndexes(inputBlocks), outputBlocks, rawDecoder,
      rawEncoder);
}
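The step returned above is driven by the caller through the ErasureCodingStep interface (getInputBlocks, getOutputBlocks, performCoding, finish), as the test harnesses further below illustrate. A minimal sketch of such a driver, assuming hadoop-common on the classpath; the 1024-byte chunk size is an arbitrary placeholder chosen here, not something the source prescribes:

import java.nio.ByteBuffer;

import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.coder.ErasureCodingStep;

final class CodingStepDriver {
  // Run one round of coding: allocate one output buffer per block to
  // recover, hand everything to the step, then let it clean up.
  static void runOnce(ErasureCodingStep step, ECChunk[] inputChunks) {
    ECChunk[] outputChunks = new ECChunk[step.getOutputBlocks().length];
    for (int i = 0; i < outputChunks.length; i++) {
      outputChunks[i] = new ECChunk(ByteBuffer.allocate(1024)); // assumed size
    }
    step.performCoding(inputChunks, outputChunks);
    step.finish();
  }
}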
Project: hadoop-oss
File: AbstractErasureDecoder.java
/**
 * Which blocks were erased?
 * @param blockGroup
 * @return output blocks to recover
 */
protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
  ECBlock[] outputBlocks = new ECBlock[getNumErasedBlocks(blockGroup)];
  int idx = 0;
  for (int i = 0; i < getNumDataUnits(); i++) {
    if (blockGroup.getDataBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getDataBlocks()[i];
    }
  }
  for (int i = 0; i < getNumParityUnits(); i++) {
    if (blockGroup.getParityBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getParityBlocks()[i];
    }
  }
  return outputBlocks;
}
Project: hadoop-oss
File: AbstractErasureDecoder.java
/**
 * Get indexes of erased blocks from inputBlocks.
 * @param inputBlocks
 * @return indexes of erased blocks in inputBlocks
 */
protected int[] getErasedIndexes(ECBlock[] inputBlocks) {
  int numErased = getNumErasedBlocks(inputBlocks);
  if (numErased == 0) {
    return new int[0];
  }
  int[] erasedIndexes = new int[numErased];
  int i = 0, j = 0;
  for (; i < inputBlocks.length && j < erasedIndexes.length; i++) {
    if (inputBlocks[i].isErased()) {
      erasedIndexes[j++] = i;
    }
  }
  return erasedIndexes;
}
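To make the contract concrete, here is a small standalone illustration (not from the source) of which indexes the method would report. It assumes ECBlock's two-argument constructor ECBlock(boolean isParity, boolean isErased) from hadoop-common:

import org.apache.hadoop.io.erasurecode.ECBlock;

public class ErasedIndexesDemo {
  public static void main(String[] args) {
    // Data blocks first, parity last, matching the getInputBlocks() layout.
    ECBlock[] inputBlocks = {
        new ECBlock(false, false),
        new ECBlock(false, true),   // erased data block at index 1
        new ECBlock(false, false),
        new ECBlock(true, true)     // erased parity block at index 3
    };
    // getErasedIndexes(inputBlocks) would return [1, 3] here.
    for (int i = 0; i < inputBlocks.length; i++) {
      if (inputBlocks[i].isErased()) {
        System.out.println("erased index: " + i);
      }
    }
  }
}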
Project: hadoop-oss
File: AbstractErasureDecoder.java
/**
 * Get erased input blocks from inputBlocks.
 * @param inputBlocks
 * @return an array of erased blocks from inputBlocks
 */
protected ECBlock[] getErasedBlocks(ECBlock[] inputBlocks) {
  int numErased = getNumErasedBlocks(inputBlocks);
  if (numErased == 0) {
    return new ECBlock[0];
  }
  ECBlock[] erasedBlocks = new ECBlock[numErased];
  int i = 0, j = 0;
  for (; i < inputBlocks.length && j < erasedBlocks.length; i++) {
    if (inputBlocks[i].isErased()) {
      erasedBlocks[j++] = inputBlocks[i];
    }
  }
  return erasedBlocks;
}
Project: aliyun-oss-hadoop-fs
File: AbstractErasureDecoder.java
/**
 * Which blocks were erased?
 * @param blockGroup
 * @return output blocks to recover
 */
protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
  ECBlock[] outputBlocks = new ECBlock[getNumErasedBlocks(blockGroup)];
  int idx = 0;
  for (int i = 0; i < getNumDataUnits(); i++) {
    if (blockGroup.getDataBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getDataBlocks()[i];
    }
  }
  for (int i = 0; i < getNumParityUnits(); i++) {
    if (blockGroup.getParityBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getParityBlocks()[i];
    }
  }
  return outputBlocks;
}
Project: aliyun-oss-hadoop-fs
File: AbstractErasureDecoder.java
/**
 * Get indexes of erased blocks from inputBlocks.
 * @param inputBlocks
 * @return indexes of erased blocks in inputBlocks
 */
protected int[] getErasedIndexes(ECBlock[] inputBlocks) {
  int numErased = getNumErasedBlocks(inputBlocks);
  if (numErased == 0) {
    return new int[0];
  }
  int[] erasedIndexes = new int[numErased];
  int i = 0, j = 0;
  for (; i < inputBlocks.length && j < erasedIndexes.length; i++) {
    if (inputBlocks[i].isErased()) {
      erasedIndexes[j++] = i;
    }
  }
  return erasedIndexes;
}
Project: aliyun-oss-hadoop-fs
File: AbstractErasureDecoder.java
/**
 * Get erased input blocks from inputBlocks.
 * @param inputBlocks
 * @return an array of erased blocks from inputBlocks
 */
protected ECBlock[] getErasedBlocks(ECBlock[] inputBlocks) {
  int numErased = getNumErasedBlocks(inputBlocks);
  if (numErased == 0) {
    return new ECBlock[0];
  }
  ECBlock[] erasedBlocks = new ECBlock[numErased];
  int i = 0, j = 0;
  for (; i < inputBlocks.length && j < erasedBlocks.length; i++) {
    if (inputBlocks[i].isErased()) {
      erasedBlocks[j++] = inputBlocks[i];
    }
  }
  return erasedBlocks;
}
Project: hops
File: HHXORErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  RawErasureDecoder rawDecoder;
  RawErasureEncoder rawEncoder;
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  ECBlock[] outputBlocks = getOutputBlocks(blockGroup);
  rawDecoder = checkCreateRSRawDecoder();
  rawEncoder = checkCreateXorRawEncoder();
  return new HHXORErasureDecodingStep(inputBlocks,
      getErasedIndexes(inputBlocks), outputBlocks, rawDecoder,
      rawEncoder);
}
Project: hops
File: AbstractErasureDecoder.java
/**
 * Which blocks were erased?
 * @param blockGroup
 * @return output blocks to recover
 */
protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
  ECBlock[] outputBlocks = new ECBlock[getNumErasedBlocks(blockGroup)];
  int idx = 0;
  for (int i = 0; i < getNumDataUnits(); i++) {
    if (blockGroup.getDataBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getDataBlocks()[i];
    }
  }
  for (int i = 0; i < getNumParityUnits(); i++) {
    if (blockGroup.getParityBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getParityBlocks()[i];
    }
  }
  return outputBlocks;
}
Project: hops
File: AbstractErasureDecoder.java
/**
 * Get indexes of erased blocks from inputBlocks.
 * @param inputBlocks
 * @return indexes of erased blocks in inputBlocks
 */
protected int[] getErasedIndexes(ECBlock[] inputBlocks) {
  int numErased = getNumErasedBlocks(inputBlocks);
  if (numErased == 0) {
    return new int[0];
  }
  int[] erasedIndexes = new int[numErased];
  int i = 0, j = 0;
  for (; i < inputBlocks.length && j < erasedIndexes.length; i++) {
    if (inputBlocks[i].isErased()) {
      erasedIndexes[j++] = i;
    }
  }
  return erasedIndexes;
}
Project: hops
File: AbstractErasureDecoder.java
/**
 * Get erased input blocks from inputBlocks.
 * @param inputBlocks
 * @return an array of erased blocks from inputBlocks
 */
protected ECBlock[] getErasedBlocks(ECBlock[] inputBlocks) {
  int numErased = getNumErasedBlocks(inputBlocks);
  if (numErased == 0) {
    return new ECBlock[0];
  }
  ECBlock[] erasedBlocks = new ECBlock[numErased];
  int i = 0, j = 0;
  for (; i < inputBlocks.length && j < erasedBlocks.length; i++) {
    if (inputBlocks[i].isErased()) {
      erasedBlocks[j++] = inputBlocks[i];
    }
  }
  return erasedBlocks;
}
Project: hadoop-oss
File: HHXORErasureDecodingStep.java
/**
 * The constructor with all the necessary info.
 * @param inputBlocks
 * @param erasedIndexes the indexes of erased blocks in the inputBlocks array
 * @param outputBlocks
 * @param rawDecoder underlying RS decoder for hitchhiker decoding
 * @param rawEncoder underlying XOR encoder for hitchhiker decoding
 */
public HHXORErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
    ECBlock[] outputBlocks, RawErasureDecoder rawDecoder,
    RawErasureEncoder rawEncoder) {
  super(inputBlocks, outputBlocks);
  this.pbIndex = rawDecoder.getNumParityUnits() - 1;
  this.erasedIndexes = erasedIndexes;
  this.rsRawDecoder = rawDecoder;
  this.xorRawEncoder = rawEncoder;
  this.piggyBackIndex = HHUtil.initPiggyBackIndexWithoutPBVec(
      rawDecoder.getNumDataUnits(), rawDecoder.getNumParityUnits());
  this.piggyBackFullIndex = HHUtil.initPiggyBackFullIndexVec(
      rawDecoder.getNumDataUnits(), piggyBackIndex);
}
Project: hadoop-oss
File: HHXORErasureEncoder.java
@Override
protected ErasureCodingStep prepareEncodingStep(
    final ECBlockGroup blockGroup) {
  RawErasureEncoder rsRawEncoderTmp = checkCreateRSRawEncoder();
  RawErasureEncoder xorRawEncoderTmp = checkCreateXorRawEncoder();
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  return new HHXORErasureEncodingStep(inputBlocks,
      getOutputBlocks(blockGroup), rsRawEncoderTmp, xorRawEncoderTmp);
}
Project: hadoop-oss
File: XORErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  RawErasureDecoder rawDecoder = CodecUtil.createXORRawDecoder(getConf(),
      getNumDataUnits(), getNumParityUnits());
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  return new ErasureDecodingStep(inputBlocks,
      getErasedIndexes(inputBlocks),
      getOutputBlocks(blockGroup), rawDecoder);
}
Project: hadoop-oss
File: XORErasureDecoder.java
/**
 * Which blocks were erased? For XOR it's simple: we only allow and return one
 * erased block, either data or parity.
 * @param blockGroup
 * @return output blocks to recover
 */
@Override
protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
  /**
   * If more than one block (either data or parity) is erased, then it's not
   * feasible to recover. We don't have the check here since it will be done
   * at the upper level: the ErasureCoder call can be avoided if recovery is
   * not possible at all.
   */
  int erasedNum = getNumErasedBlocks(blockGroup);
  ECBlock[] outputBlocks = new ECBlock[erasedNum];
  int idx = 0;
  for (int i = 0; i < getNumParityUnits(); i++) {
    if (blockGroup.getParityBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getParityBlocks()[i];
    }
  }
  for (int i = 0; i < getNumDataUnits(); i++) {
    if (blockGroup.getDataBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getDataBlocks()[i];
    }
  }
  return outputBlocks;
}
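The single-erasure limit follows from how XOR parity works: the parity unit is the bitwise XOR of all data units, so XOR-ing the parity with the surviving units reproduces exactly one missing unit. A standalone sketch of that identity in plain Java (no Hadoop types involved):

public class XorRecoveryDemo {
  public static void main(String[] args) {
    byte[] d0 = {1, 2, 3}, d1 = {4, 5, 6}, d2 = {7, 8, 9};
    // Parity is the XOR of all data units.
    byte[] parity = new byte[3];
    for (int i = 0; i < 3; i++) {
      parity[i] = (byte) (d0[i] ^ d1[i] ^ d2[i]);
    }
    // Pretend d1 was erased: XOR the parity with the survivors.
    byte[] recovered = new byte[3];
    for (int i = 0; i < 3; i++) {
      recovered[i] = (byte) (parity[i] ^ d0[i] ^ d2[i]);
    }
    System.out.println(java.util.Arrays.equals(recovered, d1)); // prints true
  }
}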
Project: hadoop-oss
File: XORErasureEncoder.java
@Override
protected ErasureCodingStep prepareEncodingStep(
    final ECBlockGroup blockGroup) {
  RawErasureEncoder rawEncoder = CodecUtil.createXORRawEncoder(getConf(),
      getNumDataUnits(), getNumParityUnits());
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  return new ErasureEncodingStep(inputBlocks,
      getOutputBlocks(blockGroup), rawEncoder);
}
Project: hadoop-oss
File: HHXORErasureEncodingStep.java
/**
 * The constructor with all the necessary info.
 *
 * @param inputBlocks
 * @param outputBlocks
 * @param rsRawEncoder underlying RS encoder for hitchhiker encoding
 * @param xorRawEncoder underlying XOR encoder for hitchhiker encoding
 */
public HHXORErasureEncodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks,
    RawErasureEncoder rsRawEncoder,
    RawErasureEncoder xorRawEncoder) {
  super(inputBlocks, outputBlocks);
  this.rsRawEncoder = rsRawEncoder;
  this.xorRawEncoder = xorRawEncoder;
  piggyBackIndex = HHUtil.initPiggyBackIndexWithoutPBVec(
      rsRawEncoder.getNumDataUnits(), rsRawEncoder.getNumParityUnits());
}
Project: hadoop-oss
File: AbstractErasureDecoder.java
/**
 * By default we have all the data blocks and parity blocks as input blocks
 * for recovery. It's codec specific.
 * @param blockGroup
 * @return input blocks
 */
protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
  ECBlock[] inputBlocks = new ECBlock[getNumDataUnits() +
      getNumParityUnits()];
  System.arraycopy(blockGroup.getDataBlocks(), 0, inputBlocks,
      0, getNumDataUnits());
  System.arraycopy(blockGroup.getParityBlocks(), 0, inputBlocks,
      getNumDataUnits(), getNumParityUnits());
  return inputBlocks;
}
Project: hadoop-oss
File: AbstractErasureDecoder.java
/**
 * Find out how many blocks are erased.
 * @param inputBlocks all the input blocks
 * @return number of erased blocks
 */
protected static int getNumErasedBlocks(ECBlock[] inputBlocks) {
  int numErased = 0;
  for (int i = 0; i < inputBlocks.length; i++) {
    if (inputBlocks[i].isErased()) {
      numErased++;
    }
  }
  return numErased;
}
Project: hadoop-oss
File: ErasureDecodingStep.java
/**
 * The constructor with all the necessary info.
 * @param inputBlocks
 * @param erasedIndexes the indexes of erased blocks in the inputBlocks array
 * @param outputBlocks
 * @param rawDecoder
 */
public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
    ECBlock[] outputBlocks,
    RawErasureDecoder rawDecoder) {
  super(inputBlocks, outputBlocks);
  this.erasedIndexes = erasedIndexes;
  this.rawDecoder = rawDecoder;
}
Project: hadoop-oss
File: RSErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  ECBlock[] outputBlocks = getOutputBlocks(blockGroup);
  RawErasureDecoder rawDecoder = checkCreateRSRawDecoder();
  return new ErasureDecodingStep(inputBlocks,
      getErasedIndexes(inputBlocks), outputBlocks, rawDecoder);
}
Project: hadoop-oss
File: BlockGrouper.java
/**
 * Calculate and organize a BlockGroup; to be called by ECManager.
 * @param dataBlocks data blocks to compute parity blocks against
 * @param parityBlocks parity blocks to be computed
 * @return the built block group
 */
public ECBlockGroup makeBlockGroup(ECBlock[] dataBlocks,
    ECBlock[] parityBlocks) {
  ECBlockGroup blockGroup = new ECBlockGroup(dataBlocks, parityBlocks);
  return blockGroup;
}
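A minimal sketch (not from the source) of assembling a (3, 1) group the same way. It assumes ECBlock's two-argument constructor ECBlock(boolean isParity, boolean isErased) from hadoop-common; the parity slot is created empty since its contents have yet to be computed:

import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;

public class BlockGroupDemo {
  public static void main(String[] args) {
    ECBlock[] dataBlocks = new ECBlock[3];
    for (int i = 0; i < dataBlocks.length; i++) {
      dataBlocks[i] = new ECBlock(false, false); // data, not erased
    }
    ECBlock[] parityBlocks = { new ECBlock(true, false) }; // parity, not erased
    ECBlockGroup blockGroup = new ECBlockGroup(dataBlocks, parityBlocks);
    System.out.println("data units: " + blockGroup.getDataBlocks().length
        + ", parity units: " + blockGroup.getParityBlocks().length);
  }
}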
Project: hadoop-oss
File: TestHHErasureCoderBase.java
@Override
protected void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length * subPacketSize];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length * subPacketSize];
  for (int i = 0; i < numChunksInBlock; i += subPacketSize) {
    // Pretend that we're reading input chunks from input blocks.
    for (int k = 0; k < subPacketSize; ++k) {
      for (int j = 0; j < inputBlocks.length; ++j) {
        inputChunks[k * inputBlocks.length + j] = ((TestBlock)
            inputBlocks[j]).chunks[i + k];
      }
      // Pretend that we allocate and will write output results to the blocks.
      for (int j = 0; j < outputBlocks.length; ++j) {
        outputChunks[k * outputBlocks.length + j] = allocateOutputChunk();
        ((TestBlock) outputBlocks[j]).chunks[i + k] =
            outputChunks[k * outputBlocks.length + j];
      }
    }
    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }
  codingStep.finish();
}
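Unlike the plain variant below, the Hitchhiker step consumes subPacketSize rounds of chunks per performCoding call, flattened round-major: the chunk for round k and block j lands at index k * numBlocks + j. A tiny standalone check of that index math (the concrete sizes here are illustrative only):

public class SubPacketLayoutDemo {
  public static void main(String[] args) {
    int subPacketSize = 2, numBlocks = 4;
    // Prints indexes 0..7: round 0 fills slots 0..3, round 1 fills 4..7.
    for (int k = 0; k < subPacketSize; k++) {
      for (int j = 0; j < numBlocks; j++) {
        System.out.println("(k=" + k + ", j=" + j + ") -> "
            + (k * numBlocks + j));
      }
    }
  }
}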
Project: hadoop-oss
File: TestErasureCoderBase.java
/**
 * This is typically how a coding step should be performed.
 * @param codingStep
 */
protected void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length];
  for (int i = 0; i < numChunksInBlock; ++i) {
    // Pretend that we're reading input chunks from input blocks.
    for (int j = 0; j < inputBlocks.length; ++j) {
      inputChunks[j] = ((TestBlock) inputBlocks[j]).chunks[i];
    }
    // Pretend that we allocate and will write output results to the blocks.
    for (int j = 0; j < outputBlocks.length; ++j) {
      outputChunks[j] = allocateOutputChunk();
      ((TestBlock) outputBlocks[j]).chunks[i] = outputChunks[j];
    }
    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }
  codingStep.finish();
}
Project: hadoop-oss
File: TestErasureCoderBase.java
/**
 * Compare and verify that the recovered blocks' data is the same as the
 * erased blocks' data.
 * @param erasedBlocks
 * @param recoveredBlocks
 */
protected void compareAndVerify(ECBlock[] erasedBlocks,
    ECBlock[] recoveredBlocks) {
  for (int i = 0; i < erasedBlocks.length; ++i) {
    compareAndVerify(((TestBlock) erasedBlocks[i]).chunks,
        ((TestBlock) recoveredBlocks[i]).chunks);
  }
}
Project: hadoop-oss
File: TestErasureCoderBase.java
/**
 * Prepare a block group for encoding.
 * @return the block group to encode
 */
protected ECBlockGroup prepareBlockGroupForEncoding() {
  ECBlock[] dataBlocks = new TestBlock[numDataUnits];
  ECBlock[] parityBlocks = new TestBlock[numParityUnits];
  for (int i = 0; i < numDataUnits; i++) {
    dataBlocks[i] = generateDataBlock();
  }
  for (int i = 0; i < numParityUnits; i++) {
    parityBlocks[i] = allocateOutputBlock();
  }
  return new ECBlockGroup(dataBlocks, parityBlocks);
}
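For decoding tests, the counterpart of this method typically clones the encoded group and marks the blocks chosen for erasure. A minimal sketch, assuming ECBlock's setErased(boolean) mutator; real tests would also back up and zero out the affected chunks so recovered data can be compared afterwards:

import org.apache.hadoop.io.erasurecode.ECBlock;

final class ErasureTestHelper {
  // Mark the chosen blocks as erased before preparing a decoding step.
  static void eraseBlocks(ECBlock[] blocks, int[] erasedIndexes) {
    for (int idx : erasedIndexes) {
      blocks[idx].setErased(true);
    }
  }
}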
Project: hadoop-oss
File: TestErasureCoderBase.java
/**
 * Generate random data and return a data block.
 * @return the generated data block
 */
protected ECBlock generateDataBlock() {
  ECChunk[] chunks = new ECChunk[numChunksInBlock];
  for (int i = 0; i < numChunksInBlock; ++i) {
    chunks[i] = generateDataChunk();
  }
  return new TestBlock(chunks);
}
Project: aliyun-oss-hadoop-fs
File: XORErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  RawErasureDecoder rawDecoder = CodecUtil.createXORRawDecoder(getConf(),
      getNumDataUnits(), getNumParityUnits());
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  return new ErasureDecodingStep(inputBlocks,
      getErasedIndexes(inputBlocks),
      getOutputBlocks(blockGroup), rawDecoder);
}
Project: aliyun-oss-hadoop-fs
File: XORErasureDecoder.java
/**
 * Which blocks were erased? For XOR it's simple: we only allow and return one
 * erased block, either data or parity.
 * @param blockGroup
 * @return output blocks to recover
 */
@Override
protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
  /**
   * If more than one block (either data or parity) is erased, then it's not
   * feasible to recover. We don't have the check here since it will be done
   * at the upper level: the ErasureCoder call can be avoided if recovery is
   * not possible at all.
   */
  int erasedNum = getNumErasedBlocks(blockGroup);
  ECBlock[] outputBlocks = new ECBlock[erasedNum];
  int idx = 0;
  for (int i = 0; i < getNumParityUnits(); i++) {
    if (blockGroup.getParityBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getParityBlocks()[i];
    }
  }
  for (int i = 0; i < getNumDataUnits(); i++) {
    if (blockGroup.getDataBlocks()[i].isErased()) {
      outputBlocks[idx++] = blockGroup.getDataBlocks()[i];
    }
  }
  return outputBlocks;
}
Project: aliyun-oss-hadoop-fs
File: XORErasureEncoder.java
@Override
protected ErasureCodingStep prepareEncodingStep(
    final ECBlockGroup blockGroup) {
  RawErasureEncoder rawEncoder = CodecUtil.createXORRawEncoder(getConf(),
      getNumDataUnits(), getNumParityUnits());
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  return new ErasureEncodingStep(inputBlocks,
      getOutputBlocks(blockGroup), rawEncoder);
}
Project: aliyun-oss-hadoop-fs
File: AbstractErasureDecoder.java
/**
 * By default we have all the data blocks and parity blocks as input blocks
 * for recovery. It's codec specific.
 * @param blockGroup
 * @return input blocks
 */
protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
  ECBlock[] inputBlocks = new ECBlock[getNumDataUnits() +
      getNumParityUnits()];
  System.arraycopy(blockGroup.getDataBlocks(), 0, inputBlocks,
      0, getNumDataUnits());
  System.arraycopy(blockGroup.getParityBlocks(), 0, inputBlocks,
      getNumDataUnits(), getNumParityUnits());
  return inputBlocks;
}
Project: aliyun-oss-hadoop-fs
File: AbstractErasureDecoder.java
/**
 * Find out how many blocks are erased.
 * @param inputBlocks all the input blocks
 * @return number of erased blocks
 */
protected static int getNumErasedBlocks(ECBlock[] inputBlocks) {
  int numErased = 0;
  for (int i = 0; i < inputBlocks.length; i++) {
    if (inputBlocks[i].isErased()) {
      numErased++;
    }
  }
  return numErased;
}
Project: aliyun-oss-hadoop-fs
File: ErasureDecodingStep.java
/**
 * The constructor with all the necessary info.
 * @param inputBlocks
 * @param erasedIndexes the indexes of erased blocks in the inputBlocks array
 * @param outputBlocks
 * @param rawDecoder
 */
public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
    ECBlock[] outputBlocks,
    RawErasureDecoder rawDecoder) {
  super(inputBlocks, outputBlocks);
  this.erasedIndexes = erasedIndexes;
  this.rawDecoder = rawDecoder;
}
Project: aliyun-oss-hadoop-fs
File: RSErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  ECBlock[] inputBlocks = getInputBlocks(blockGroup);
  ECBlock[] outputBlocks = getOutputBlocks(blockGroup);
  RawErasureDecoder rawDecoder = checkCreateRSRawDecoder();
  return new ErasureDecodingStep(inputBlocks,
      getErasedIndexes(inputBlocks), outputBlocks, rawDecoder);
}
Project: aliyun-oss-hadoop-fs
File: BlockGrouper.java
/**
 * Calculate and organize a BlockGroup; to be called by ECManager.
 * @param dataBlocks data blocks to compute parity blocks against
 * @param parityBlocks parity blocks to be computed
 * @return the built block group
 */
public ECBlockGroup makeBlockGroup(ECBlock[] dataBlocks,
    ECBlock[] parityBlocks) {
  ECBlockGroup blockGroup = new ECBlockGroup(dataBlocks, parityBlocks);
  return blockGroup;
}
Project: aliyun-oss-hadoop-fs
File: TestErasureCoderBase.java
/**
 * This is typically how a coding step should be performed.
 * @param codingStep
 */
private void performCodingStep(ErasureCodingStep codingStep) {
  // Pretend that we're opening these input blocks and output blocks.
  ECBlock[] inputBlocks = codingStep.getInputBlocks();
  ECBlock[] outputBlocks = codingStep.getOutputBlocks();
  // We allocate input and output chunks accordingly.
  ECChunk[] inputChunks = new ECChunk[inputBlocks.length];
  ECChunk[] outputChunks = new ECChunk[outputBlocks.length];
  for (int i = 0; i < numChunksInBlock; ++i) {
    // Pretend that we're reading input chunks from input blocks.
    for (int j = 0; j < inputBlocks.length; ++j) {
      inputChunks[j] = ((TestBlock) inputBlocks[j]).chunks[i];
    }
    // Pretend that we allocate and will write output results to the blocks.
    for (int j = 0; j < outputBlocks.length; ++j) {
      outputChunks[j] = allocateOutputChunk();
      ((TestBlock) outputBlocks[j]).chunks[i] = outputChunks[j];
    }
    // Given the input chunks and output chunk buffers, just call it!
    codingStep.performCoding(inputChunks, outputChunks);
  }
  codingStep.finish();
}
Project: aliyun-oss-hadoop-fs
File: TestErasureCoderBase.java
/**
 * Compare and verify that the recovered blocks' data is the same as the
 * erased blocks' data.
 * @param erasedBlocks
 * @param recoveredBlocks
 */
protected void compareAndVerify(ECBlock[] erasedBlocks,
    ECBlock[] recoveredBlocks) {
  for (int i = 0; i < erasedBlocks.length; ++i) {
    compareAndVerify(((TestBlock) erasedBlocks[i]).chunks,
        ((TestBlock) recoveredBlocks[i]).chunks);
  }
}
Project: aliyun-oss-hadoop-fs
File: TestErasureCoderBase.java
/**
 * Prepare a block group for encoding.
 * @return the block group to encode
 */
protected ECBlockGroup prepareBlockGroupForEncoding() {
  ECBlock[] dataBlocks = new TestBlock[numDataUnits];
  ECBlock[] parityBlocks = new TestBlock[numParityUnits];
  for (int i = 0; i < numDataUnits; i++) {
    dataBlocks[i] = generateDataBlock();
  }
  for (int i = 0; i < numParityUnits; i++) {
    parityBlocks[i] = allocateOutputBlock();
  }
  return new ECBlockGroup(dataBlocks, parityBlocks);
}
Project: aliyun-oss-hadoop-fs
File: TestErasureCoderBase.java
/**
 * Generate random data and return a data block.
 * @return the generated data block
 */
protected ECBlock generateDataBlock() {
  ECChunk[] chunks = new ECChunk[numChunksInBlock];
  for (int i = 0; i < numChunksInBlock; ++i) {
    chunks[i] = generateDataChunk();
  }
  return new TestBlock(chunks);
}
Project: hops
File: HHXORErasureDecodingStep.java
/**
 * The constructor with all the necessary info.
 * @param inputBlocks
 * @param erasedIndexes the indexes of erased blocks in the inputBlocks array
 * @param outputBlocks
 * @param rawDecoder underlying RS decoder for hitchhiker decoding
 * @param rawEncoder underlying XOR encoder for hitchhiker decoding
 */
public HHXORErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
    ECBlock[] outputBlocks, RawErasureDecoder rawDecoder,
    RawErasureEncoder rawEncoder) {
  super(inputBlocks, outputBlocks);
  this.pbIndex = rawDecoder.getNumParityUnits() - 1;
  this.erasedIndexes = erasedIndexes;
  this.rsRawDecoder = rawDecoder;
  this.xorRawEncoder = rawEncoder;
  this.piggyBackIndex = HHUtil.initPiggyBackIndexWithoutPBVec(
      rawDecoder.getNumDataUnits(), rawDecoder.getNumParityUnits());
  this.piggyBackFullIndex = HHUtil.initPiggyBackFullIndexVec(
      rawDecoder.getNumDataUnits(), piggyBackIndex);
}