Java class org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PacketHeaderProto usage examples
Project: hadoop
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: aliyun-oss-hadoop-fs
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(true);
  }
  proto = builder.build();
}
Project: big-c
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: hadoop-2.6.0-cdh5.4.3
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: hadoop-plus
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: FlexMap
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: hops
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes", packetLen);
  PacketHeaderProto.Builder builder =
      PacketHeaderProto.newBuilder().setOffsetInBlock(offsetInBlock)
          .setSeqno(seqno).setLastPacketInBlock(lastPacketInBlock)
          .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: hadoop-TCP
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: hardfs
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: hadoop-on-lustre2
File: PacketHeader.java
public PacketHeader(int packetLen, long offsetInBlock, long seqno,
    boolean lastPacketInBlock, int dataLen, boolean syncBlock) {
  this.packetLen = packetLen;
  Preconditions.checkArgument(packetLen >= Ints.BYTES,
      "packet len %s should always be at least 4 bytes",
      packetLen);
  PacketHeaderProto.Builder builder = PacketHeaderProto.newBuilder()
      .setOffsetInBlock(offsetInBlock)
      .setSeqno(seqno)
      .setLastPacketInBlock(lastPacketInBlock)
      .setDataLen(dataLen);
  if (syncBlock) {
    // Only set syncBlock if it is specified.
    // This is wire-incompatible with Hadoop 2.0.0-alpha due to HDFS-3721
    // because it changes the length of the packet header, and BlockReceiver
    // in that version did not support variable-length headers.
    builder.setSyncBlock(syncBlock);
  }
  proto = builder.build();
}
Project: hadoop
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: aliyun-oss-hadoop-fs
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: aliyun-oss-hadoop-fs
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: big-c
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: big-c
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-2.6.0-cdh5.4.3
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-2.6.0-cdh5.4.3
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-plus
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-plus
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: FlexMap
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: FlexMap
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hops
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hops
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-TCP
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-TCP
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hardfs
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hardfs
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-on-lustre2
File: PacketHeader.java
public void readFields(ByteBuffer buf) throws IOException {
  packetLen = buf.getInt();
  short protoLen = buf.getShort();
  byte[] data = new byte[protoLen];
  buf.get(data);
  proto = PacketHeaderProto.parseFrom(data);
}
Project: hadoop-on-lustre2
File: PacketHeader.java
public void readFields(DataInputStream in) throws IOException {
  this.packetLen = in.readInt();
  short protoLen = in.readShort();
  byte[] data = new byte[protoLen];
  in.readFully(data);
  proto = PacketHeaderProto.parseFrom(data);
}
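Each readFields variant above decodes the same framing: an int packet length, a short protobuf header length, and then that many bytes of serialized PacketHeaderProto. The self-contained sketch below builds such a buffer by hand and decodes it the way readFields(ByteBuffer) does, parsing into the proto directly; it relies only on the generated protobuf API plus java.nio, and the packetLen value is a placeholder.
import java.nio.ByteBuffer;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PacketHeaderProto;

public class HeaderFramingSketch {
  public static void main(String[] args) throws Exception {
    PacketHeaderProto header = PacketHeaderProto.newBuilder()
        .setOffsetInBlock(0L)
        .setSeqno(7L)
        .setLastPacketInBlock(false)
        .setDataLen(512)
        .build();
    byte[] protoBytes = header.toByteArray();

    // Frame the header the way the readers above expect it:
    // [int packetLen][short protoLen][protoLen bytes of PacketHeaderProto].
    int packetLen = 4 + 512; // placeholder; its exact accounting is not shown in these snippets
    ByteBuffer buf = ByteBuffer.allocate(4 + 2 + protoBytes.length);
    buf.putInt(packetLen);
    buf.putShort((short) protoBytes.length);
    buf.put(protoBytes);
    buf.flip();

    // Decode exactly as readFields(ByteBuffer) does.
    int decodedPacketLen = buf.getInt();
    short protoLen = buf.getShort();
    byte[] data = new byte[protoLen];
    buf.get(data);
    PacketHeaderProto decoded = PacketHeaderProto.parseFrom(data);

    System.out.println("packetLen=" + decodedPacketLen
        + " seqno=" + decoded.getSeqno()
        + " dataLen=" + decoded.getDataLen());
  }
}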
Project: hadoop
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: aliyun-oss-hadoop-fs
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: big-c
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: hadoop-2.6.0-cdh5.4.3
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: hadoop-plus
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: FlexMap
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: hops
File: PacketHeader.java
public void setFieldsFromData(int packetLen, byte[] headerData)
    throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: hadoop-TCP
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: hardfs
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
Project: hadoop-on-lustre2
File: PacketHeader.java
public void setFieldsFromData(
    int packetLen, byte[] headerData) throws InvalidProtocolBufferException {
  this.packetLen = packetLen;
  proto = PacketHeaderProto.parseFrom(headerData);
}
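setFieldsFromData covers the case where the caller has already pulled the framed header bytes off the wire and only needs PacketHeader to wrap them. The hypothetical caller below is not taken from any of the projects above: it assumes PacketHeader's usual package (org.apache.hadoop.hdfs.protocol.datatransfer) and a no-argument constructor, neither of which appears in these snippets.
import java.io.DataInputStream;
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;

public class SetFieldsFromDataSketch {
  // Reads the same framing as readFields(DataInputStream), but hands the
  // already-read proto bytes to setFieldsFromData instead.
  static PacketHeader readHeader(DataInputStream in) throws IOException {
    int packetLen = in.readInt();
    short protoLen = in.readShort();
    byte[] headerData = new byte[protoLen];
    in.readFully(headerData);
    // Assumed no-argument constructor; InvalidProtocolBufferException extends
    // IOException, so a malformed header surfaces through the same throws clause.
    PacketHeader header = new PacketHeader();
    header.setFieldsFromData(packetLen, headerData);
    return header;
  }
}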