Example source for the Java class org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil
Project: hadoop-oss
File: RSRawEncoder.java
public RSRawEncoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }

  encodeMatrix = new byte[getNumAllUnits() * numDataUnits];
  RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), numDataUnits);
  if (isAllowingVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, getNumAllUnits());
  }
  gfTables = new byte[getNumAllUnits() * numDataUnits * 32];
  RSUtil.initTables(numDataUnits, numParityUnits, encodeMatrix,
      numDataUnits * numDataUnits, gfTables);
  if (isAllowingVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
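For orientation, here is a minimal, hedged sketch of driving DumpUtil directly, outside any coder. It assumes only the signatures visible above (dumpMatrix(byte[], int, int), and bytesToHex(byte[], int) with -1 taken to mean no length limit) plus RSUtil.genCauchyMatrix as called in the constructor; the DumpUtilDemo class name and the 6+3 layout are illustrative choices, not part of the original listing:

import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;

public class DumpUtilDemo {
  public static void main(String[] args) {
    int numDataUnits = 6;    // illustrative 6+3 Reed-Solomon schema
    int numParityUnits = 3;
    int numAllUnits = numDataUnits + numParityUnits;

    // Build the same Cauchy encode matrix the RS coders generate.
    byte[] encodeMatrix = new byte[numAllUnits * numDataUnits];
    RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, numDataUnits);

    // Print the matrix row by row, then the raw bytes as hex (-1 = no limit,
    // per the usage in the snippets above).
    DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, numAllUnits);
    System.out.println(DumpUtil.bytesToHex(encodeMatrix, -1));
  }
}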
Project: hadoop-oss
File: RSRawDecoder.java
private void processErasures(int[] erasedIndexes) {
  this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];

  this.erasureFlags = new boolean[getNumAllUnits()];
  this.numErasedDataUnits = 0;
  for (int i = 0; i < erasedIndexes.length; i++) {
    int index = erasedIndexes[i];
    erasureFlags[index] = true;
    if (index < getNumDataUnits()) {
      numErasedDataUnits++;
    }
  }

  generateDecodeMatrix(erasedIndexes);
  RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
      decodeMatrix, 0, gfTables);
  if (isAllowingVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
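To make the bookkeeping concrete, a small worked sketch of the loop above, assuming a 6+3 schema where units 0..5 are data and 6..8 are parity (values chosen here for illustration only):

int numDataUnits = 6;
int numAllUnits = 9;
int[] erasedIndexes = {2, 7};            // one data unit and one parity unit lost
boolean[] erasureFlags = new boolean[numAllUnits];
int numErasedDataUnits = 0;
for (int index : erasedIndexes) {
  erasureFlags[index] = true;            // flags[2] and flags[7] become true
  if (index < numDataUnits) {
    numErasedDataUnits++;                // only index 2 counts, so the result is 1
  }
}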
Project: hops
File: RSRawEncoder.java
public RSRawEncoder(ErasureCoderOptions coderOptions) {
  super(coderOptions);
  if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }

  encodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), getNumDataUnits());
  if (allowVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, getNumDataUnits(), getNumAllUnits());
  }
  gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
  RSUtil.initTables(getNumDataUnits(), getNumParityUnits(), encodeMatrix,
      getNumDataUnits() * getNumDataUnits(), gfTables);
  if (allowVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
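A hedged usage sketch for the options-based encoder above. It assumes ErasureCoderOptions offers a constructor taking (numDataUnits, numParityUnits, allowChangeInputs, allowVerboseDump), which is where allowVerboseDump() would read its flag from; constructing the encoder is enough to trigger the dumpMatrix and bytesToHex calls shown in the snippet:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;

// 6 data units, 3 parity units, inputs left untouched, verbose dump enabled
// (the 4-argument constructor is an assumption based on the flags used above).
ErasureCoderOptions options = new ErasureCoderOptions(6, 3, false, true);
RSRawEncoder encoder = new RSRawEncoder(options);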
Project: hops
File: RSRawDecoder.java
private void processErasures(int[] erasedIndexes) {
  this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];

  this.erasureFlags = new boolean[getNumAllUnits()];
  this.numErasedDataUnits = 0;
  for (int i = 0; i < erasedIndexes.length; i++) {
    int index = erasedIndexes[i];
    erasureFlags[index] = true;
    if (index < getNumDataUnits()) {
      numErasedDataUnits++;
    }
  }

  generateDecodeMatrix(erasedIndexes);
  RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
      decodeMatrix, 0, gfTables);
  if (allowVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
Project: hadoop-oss
File: RSRawDecoder.java
public RSRawDecoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }

  int numAllUnits = getNumDataUnits() + numParityUnits;
  encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
  if (isAllowingVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, numAllUnits);
  }
}
Project: hops
File: RSRawDecoder.java
public RSRawDecoder(ErasureCoderOptions coderOptions) {
  super(coderOptions);

  int numAllUnits = getNumAllUnits();
  if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }
  encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
  if (allowVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, getNumDataUnits(), numAllUnits);
  }
}
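The decoder is constructed the same way as the encoder; a later decode(inputs, erasedIndexes, outputs) call is what eventually reaches the processErasures(...) method shown earlier. A minimal, hedged construction sketch, assuming the two-argument ErasureCoderOptions constructor:

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;

ErasureCoderOptions options = new ErasureCoderOptions(6, 3); // 6 data, 3 parity
RSRawDecoder decoder = new RSRawDecoder(options);
// decoder.decode(inputs, erasedIndexes, outputs) would then drive
// processErasures(erasedIndexes) for the concrete erasure pattern.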
Project: hadoop-oss
File: TestCoderBase.java
/**
 * Dump chunks prefixed with a header if allowDump is enabled.
 * @param header a label printed before the chunks
 * @param chunks the chunks to dump
 */
protected void dumpChunks(String header, ECChunk[] chunks) {
  if (allowDump) {
    DumpUtil.dumpChunks(header, chunks);
  }
}
Project: aliyun-oss-hadoop-fs and Project: hops ship the identical dumpChunks method in their TestCoderBase.java; the snippet above applies verbatim to both.
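A minimal sketch of exercising the helper's underlying call, assuming ECChunk can wrap a ByteBuffer directly (org.apache.hadoop.io.erasurecode.ECChunk); the header string and byte values are illustrative:

import java.nio.ByteBuffer;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;

ECChunk[] chunks = new ECChunk[] {
    new ECChunk(ByteBuffer.wrap(new byte[] {1, 2, 3})),
    new ECChunk(ByteBuffer.wrap(new byte[] {4, 5, 6}))
};
DumpUtil.dumpChunks("encoded chunks", chunks); // prints the header, then each chunk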