Java 类org.apache.hadoop.io.erasurecode.CodecUtil 实例源码
项目:hadoop-oss
文件:HHXORErasureDecoder.java
private RawErasureDecoder checkCreateRSRawDecoder() {
  // Lazily create and cache the Reed-Solomon raw decoder on first use.
  if (rsRawDecoder != null) {
    return rsRawDecoder;
  }
  rsRawDecoder = CodecUtil.createRSRawDecoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  return rsRawDecoder;
}
项目:hadoop-oss
文件:HHXORErasureDecoder.java
private RawErasureEncoder checkCreateXorRawEncoder() {
  // Lazily create and cache the XOR raw encoder on first use.
  if (xorRawEncoder != null) {
    return xorRawEncoder;
  }
  xorRawEncoder = CodecUtil.createXORRawEncoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  // Forbid the coder from mutating its input buffers (ALLOW_CHANGE_INPUTS off).
  xorRawEncoder.setCoderOption(CoderOption.ALLOW_CHANGE_INPUTS, false);
  return xorRawEncoder;
}
项目:hadoop-oss
文件:HHXORErasureEncoder.java
private RawErasureEncoder checkCreateRSRawEncoder() {
  // Lazily create and cache the Reed-Solomon raw encoder on first use.
  if (rsRawEncoder != null) {
    return rsRawEncoder;
  }
  rsRawEncoder = CodecUtil.createRSRawEncoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  return rsRawEncoder;
}
项目:hadoop-oss
文件:HHXORErasureEncoder.java
private RawErasureEncoder checkCreateXorRawEncoder() {
  // Return the cached XOR raw encoder, creating it on first call.
  if (xorRawEncoder != null) {
    return xorRawEncoder;
  }
  xorRawEncoder = CodecUtil.createXORRawEncoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  // Forbid the coder from mutating its input buffers (ALLOW_CHANGE_INPUTS off).
  xorRawEncoder.setCoderOption(CoderOption.ALLOW_CHANGE_INPUTS, false);
  return xorRawEncoder;
}
项目:hadoop-oss
文件:XORErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  // Build a fresh XOR raw decoder sized to this coder's data/parity layout.
  final RawErasureDecoder decoder = CodecUtil.createXORRawDecoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  final ECBlock[] inputs = getInputBlocks(blockGroup);
  final ECBlock[] outputs = getOutputBlocks(blockGroup);
  return new ErasureDecodingStep(
      inputs, getErasedIndexes(inputs), outputs, decoder);
}
项目:hadoop-oss
文件:RSErasureEncoder.java
private RawErasureEncoder checkCreateRSRawEncoder() {
  // Lazily create and cache the RS raw encoder on first use.
  if (rawEncoder != null) {
    return rawEncoder;
  }
  rawEncoder = CodecUtil.createRSRawEncoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  return rawEncoder;
}
项目:hadoop-oss
文件:XORErasureEncoder.java
@Override
protected ErasureCodingStep prepareEncodingStep(
    final ECBlockGroup blockGroup) {
  // Build a fresh XOR raw encoder sized to this coder's data/parity layout.
  final RawErasureEncoder encoder = CodecUtil.createXORRawEncoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  final ECBlock[] inputs = getInputBlocks(blockGroup);
  final ECBlock[] outputs = getOutputBlocks(blockGroup);
  return new ErasureEncodingStep(inputs, outputs, encoder);
}
项目:hadoop-oss
文件:RSErasureDecoder.java
private RawErasureDecoder checkCreateRSRawDecoder() {
  // Return the cached RS raw decoder, creating it on first call.
  if (rsRawDecoder != null) {
    return rsRawDecoder;
  }
  rsRawDecoder = CodecUtil.createRSRawDecoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  return rsRawDecoder;
}
项目:aliyun-oss-hadoop-fs
文件:DFSStripedOutputStream.java
/**
 * Construct a new output stream for creating a file.
 *
 * Derives the striping layout (cell size, data/parity counts) from the
 * file's erasure coding policy, builds the RS encoder used to compute
 * parity cells, and starts one striped data streamer per internal block
 * of the block group.
 */
DFSStripedOutputStream(DFSClient dfsClient, String src, HdfsFileStatus stat,
    EnumSet<CreateFlag> flag, Progressable progress,
    DataChecksum checksum, String[] favoredNodes)
    throws IOException {
  super(dfsClient, src, stat, flag, progress, checksum, favoredNodes, false);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Creating DFSStripedOutputStream for " + src);
  }
  // Layout comes from the file's EC policy, not from client configuration.
  final ErasureCodingPolicy ecPolicy = stat.getErasureCodingPolicy();
  final int numParityBlocks = ecPolicy.getNumParityUnits();
  cellSize = ecPolicy.getCellSize();
  numDataBlocks = ecPolicy.getNumDataUnits();
  numAllBlocks = numDataBlocks + numParityBlocks;
  this.favoredNodes = favoredNodes;
  failedStreamers = new ArrayList<>();
  corruptBlockCountMap = new LinkedHashMap<>();
  // RS encoder produces numParityBlocks parity cells per stripe of data.
  encoder = CodecUtil.createRSRawEncoder(dfsClient.getConfiguration(),
      numDataBlocks, numParityBlocks);
  coordinator = new Coordinator(numAllBlocks);
  try {
    cellBuffers = new CellBuffers(numParityBlocks);
  } catch (InterruptedException ie) {
    // Surface the interrupt as an InterruptedIOException for callers.
    throw DFSUtilClient.toInterruptedIOException(
        "Failed to create cell buffers", ie);
  }
  // One streamer per internal block (data blocks first, then parity).
  streamers = new ArrayList<>(numAllBlocks);
  for (short i = 0; i < numAllBlocks; i++) {
    StripedDataStreamer streamer = new StripedDataStreamer(stat,
        dfsClient, src, progress, checksum, cachingStrategy, byteArrayManager,
        favoredNodes, i, coordinator);
    streamers.add(streamer);
  }
  currentPackets = new DFSPacket[streamers.size()];
  setCurrentStreamer(0);
}
项目:aliyun-oss-hadoop-fs
文件:StripedFileTestUtil.java
static void verifyParityBlocks(Configuration conf, final long size,
    final int cellSize, byte[][] dataBytes, byte[][] parityBytes,
    Set<Integer> checkSet) {
  // Expected length of an internal parity block for this file size/layout
  // (index dataBytes.length is the first parity block of the group).
  final int parityBlkSize = (int) StripedBlockUtil.getInternalBlockLength(
      size, cellSize, dataBytes.length, dataBytes.length);
  final byte[][] expected = new byte[parityBytes.length][];
  for (int p = 0; p < expected.length; p++) {
    expected[p] = new byte[parityBlkSize];
  }
  // Zero-pad (or zero-fill missing) data blocks to the first block's length
  // so all encoder inputs have equal size.
  final int fullLen = dataBytes[0].length;
  for (int d = 0; d < dataBytes.length; d++) {
    final byte[] blk = dataBytes[d];
    if (blk == null) {
      dataBytes[d] = new byte[fullLen];
    } else if (blk.length < fullLen) {
      dataBytes[d] = new byte[fullLen];
      System.arraycopy(blk, 0, dataBytes[d], 0, blk.length);
    }
  }
  // Re-encode the data and compare against the parity actually written,
  // but only for the block indexes the caller asked to check.
  final RawErasureEncoder encoder =
      CodecUtil.createRSRawEncoder(conf, dataBytes.length, parityBytes.length);
  encoder.encode(dataBytes, expected);
  for (int p = 0; p < parityBytes.length; p++) {
    if (checkSet.contains(p + dataBytes.length)) {
      Assert.assertArrayEquals("i=" + p, expected[p], parityBytes[p]);
    }
  }
}
项目:aliyun-oss-hadoop-fs
文件:XORErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  // Create a new XOR raw decoder for this block group's recovery step.
  final RawErasureDecoder decoder = CodecUtil.createXORRawDecoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  final ECBlock[] inputs = getInputBlocks(blockGroup);
  final ECBlock[] outputs = getOutputBlocks(blockGroup);
  return new ErasureDecodingStep(
      inputs, getErasedIndexes(inputs), outputs, decoder);
}
项目:aliyun-oss-hadoop-fs
文件:RSErasureEncoder.java
private RawErasureEncoder checkCreateRSRawEncoder() {
  // Return the cached RS raw encoder, creating it on first call.
  if (rawEncoder != null) {
    return rawEncoder;
  }
  rawEncoder = CodecUtil.createRSRawEncoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  return rawEncoder;
}
项目:aliyun-oss-hadoop-fs
文件:XORErasureEncoder.java
@Override
protected ErasureCodingStep prepareEncodingStep(
    final ECBlockGroup blockGroup) {
  // Create a new XOR raw encoder for this block group's encoding step.
  final RawErasureEncoder encoder = CodecUtil.createXORRawEncoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  final ECBlock[] inputs = getInputBlocks(blockGroup);
  final ECBlock[] outputs = getOutputBlocks(blockGroup);
  return new ErasureEncodingStep(inputs, outputs, encoder);
}
项目:aliyun-oss-hadoop-fs
文件:RSErasureDecoder.java
private RawErasureDecoder checkCreateRSRawDecoder() {
  // Lazily create and cache the RS raw decoder on first use.
  if (rsRawDecoder != null) {
    return rsRawDecoder;
  }
  rsRawDecoder = CodecUtil.createRSRawDecoder(
      getConf(), getNumDataUnits(), getNumParityUnits());
  return rsRawDecoder;
}
项目:hops
文件:HHXORErasureDecoder.java
private RawErasureDecoder checkCreateRSRawDecoder() {
  // Lazily create and cache the RS raw decoder on first use.
  if (rsRawDecoder != null) {
    return rsRawDecoder;
  }
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  rsRawDecoder = CodecUtil.createRawDecoder(
      getConf(), ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, options);
  return rsRawDecoder;
}
项目:hops
文件:HHXORErasureDecoder.java
private RawErasureEncoder checkCreateXorRawEncoder() {
  // Lazily create and cache the XOR raw encoder on first use.
  if (xorRawEncoder != null) {
    return xorRawEncoder;
  }
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  xorRawEncoder = CodecUtil.createRawEncoder(
      getConf(), ErasureCodeConstants.XOR_CODEC_NAME, options);
  return xorRawEncoder;
}
项目:hops
文件:HHXORErasureEncoder.java
private RawErasureEncoder checkCreateRSRawEncoder() {
  // Lazily create and cache the RS raw encoder on first use.
  if (rsRawEncoder != null) {
    return rsRawEncoder;
  }
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  rsRawEncoder = CodecUtil.createRawEncoder(
      getConf(), ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, options);
  return rsRawEncoder;
}
项目:hops
文件:HHXORErasureEncoder.java
private RawErasureEncoder checkCreateXorRawEncoder() {
  // Return the cached XOR raw encoder, creating it on first call.
  if (xorRawEncoder != null) {
    return xorRawEncoder;
  }
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  xorRawEncoder = CodecUtil.createRawEncoder(
      getConf(), ErasureCodeConstants.XOR_CODEC_NAME, options);
  return xorRawEncoder;
}
项目:hops
文件:XORErasureDecoder.java
@Override
protected ErasureCodingStep prepareDecodingStep(
    final ECBlockGroup blockGroup) {
  // Fresh XOR raw decoder configured for this coder's data/parity layout.
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  final RawErasureDecoder decoder = CodecUtil.createRawDecoder(
      getConf(), ErasureCodeConstants.XOR_CODEC_NAME, options);
  final ECBlock[] inputs = getInputBlocks(blockGroup);
  final ECBlock[] outputs = getOutputBlocks(blockGroup);
  return new ErasureDecodingStep(
      inputs, getErasedIndexes(inputs), outputs, decoder);
}
项目:hops
文件:RSErasureEncoder.java
private RawErasureEncoder checkCreateRSRawEncoder() {
  // Lazily create and cache the RS raw encoder on first use.
  if (rawEncoder != null) {
    return rawEncoder;
  }
  // TODO: pick the raw coder according to the configured codec.
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  rawEncoder = CodecUtil.createRawEncoder(
      getConf(), ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, options);
  return rawEncoder;
}
项目:hops
文件:XORErasureEncoder.java
@Override
protected ErasureCodingStep prepareEncodingStep(
    final ECBlockGroup blockGroup) {
  // Fresh XOR raw encoder configured for this coder's data/parity layout.
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  final RawErasureEncoder encoder = CodecUtil.createRawEncoder(
      getConf(), ErasureCodeConstants.XOR_CODEC_NAME, options);
  final ECBlock[] inputs = getInputBlocks(blockGroup);
  final ECBlock[] outputs = getOutputBlocks(blockGroup);
  return new ErasureEncodingStep(inputs, outputs, encoder);
}
项目:hops
文件:RSErasureDecoder.java
private RawErasureDecoder checkCreateRSRawDecoder() {
  // Return the cached RS raw decoder, creating it on first call.
  if (rsRawDecoder != null) {
    return rsRawDecoder;
  }
  // TODO: pick the raw coder according to the configured codec.
  final ErasureCoderOptions options = new ErasureCoderOptions(
      getNumDataUnits(), getNumParityUnits());
  rsRawDecoder = CodecUtil.createRawDecoder(
      getConf(), ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, options);
  return rsRawDecoder;
}
项目:aliyun-oss-hadoop-fs
文件:ErasureCodingWorker.java
private RawErasureDecoder newDecoder(int numDataUnits, int numParityUnits) {
  // Thin factory: delegate to CodecUtil with this worker's configuration.
  final RawErasureDecoder decoder =
      CodecUtil.createRSRawDecoder(conf, numDataUnits, numParityUnits);
  return decoder;
}