Java 类org.apache.hadoop.fs.ByteBufferReadable 实例源码
项目:hadoop-oss
文件:CryptoInputStream.java
/**
 * Wraps {@code in} so reads return plaintext decrypted with the supplied
 * codec, key and IV.
 *
 * @param in underlying encrypted stream
 * @param codec crypto codec providing the decryptor; validated before use
 * @param bufferSize requested buffer size, normalized by checkBufferSize
 * @param key secret key; defensively cloned
 * @param iv initialization vector; cloned into both {@code initIV} and {@code iv}
 * @param streamOffset offset of {@code in} within the encrypted stream
 *        (presumably a byte offset — confirm against callers)
 * @throws IOException if codec validation or the initial offset reset fails
 */
public CryptoInputStream(InputStream in, CryptoCodec codec,
int bufferSize, byte[] key, byte[] iv, long streamOffset) throws IOException {
super(in);
// Reject unusable codecs up front, before allocating buffers.
CryptoStreamUtils.checkCodec(codec);
this.bufferSize = CryptoStreamUtils.checkBufferSize(codec, bufferSize);
this.codec = codec;
// Clone caller-owned arrays so later caller mutation cannot affect this stream.
this.key = key.clone();
// Two clones of iv: initIV presumably stays pristine while iv is the working
// copy — confirm in the rest of the class.
this.initIV = iv.clone();
this.iv = iv.clone();
this.streamOffset = streamOffset;
// Capability probes used to choose the read path for the wrapped stream.
isByteBufferReadable = in instanceof ByteBufferReadable;
isReadableByteChannel = in instanceof ReadableByteChannel;
// Direct buffers — presumably so a native codec avoids extra copies.
inBuffer = ByteBuffer.allocateDirect(this.bufferSize);
outBuffer = ByteBuffer.allocateDirect(this.bufferSize);
decryptor = getDecryptor();
// Must run last: relies on the fields initialized above.
resetStreamOffset(streamOffset);
}
项目:hadoop
文件:CryptoInputStream.java
/**
 * Builds a decrypting wrapper over {@code in} using the given codec, key and
 * IV; identical to the other CryptoInputStream constructor variants collected
 * in this file.
 *
 * @throws IOException if codec validation or the initial offset reset fails
 */
public CryptoInputStream(InputStream in, CryptoCodec codec,
int bufferSize, byte[] key, byte[] iv, long streamOffset) throws IOException {
super(in);
// Validate the codec before doing any allocation.
CryptoStreamUtils.checkCodec(codec);
this.bufferSize = CryptoStreamUtils.checkBufferSize(codec, bufferSize);
this.codec = codec;
// Defensive copies of caller-owned key/IV material.
this.key = key.clone();
this.initIV = iv.clone();
this.iv = iv.clone();
this.streamOffset = streamOffset;
// Record which optimized read paths the wrapped stream supports.
isByteBufferReadable = in instanceof ByteBufferReadable;
isReadableByteChannel = in instanceof ReadableByteChannel;
inBuffer = ByteBuffer.allocateDirect(this.bufferSize);
outBuffer = ByteBuffer.allocateDirect(this.bufferSize);
decryptor = getDecryptor();
// Runs last; depends on the state initialized above.
resetStreamOffset(streamOffset);
}
项目:aliyun-oss-hadoop-fs
文件:CryptoInputStream.java
/**
 * Builds a decrypting wrapper over {@code in} using the given codec, key and
 * IV; identical to the other CryptoInputStream constructor variants collected
 * in this file.
 *
 * @throws IOException if codec validation or the initial offset reset fails
 */
public CryptoInputStream(InputStream in, CryptoCodec codec,
int bufferSize, byte[] key, byte[] iv, long streamOffset) throws IOException {
super(in);
// Validate the codec before doing any allocation.
CryptoStreamUtils.checkCodec(codec);
this.bufferSize = CryptoStreamUtils.checkBufferSize(codec, bufferSize);
this.codec = codec;
// Defensive copies of caller-owned key/IV material.
this.key = key.clone();
this.initIV = iv.clone();
this.iv = iv.clone();
this.streamOffset = streamOffset;
// Record which optimized read paths the wrapped stream supports.
isByteBufferReadable = in instanceof ByteBufferReadable;
isReadableByteChannel = in instanceof ReadableByteChannel;
inBuffer = ByteBuffer.allocateDirect(this.bufferSize);
outBuffer = ByteBuffer.allocateDirect(this.bufferSize);
decryptor = getDecryptor();
// Runs last; depends on the state initialized above.
resetStreamOffset(streamOffset);
}
项目:big-c
文件:CryptoInputStream.java
/**
 * Builds a decrypting wrapper over {@code in} using the given codec, key and
 * IV; identical to the other CryptoInputStream constructor variants collected
 * in this file.
 *
 * @throws IOException if codec validation or the initial offset reset fails
 */
public CryptoInputStream(InputStream in, CryptoCodec codec,
int bufferSize, byte[] key, byte[] iv, long streamOffset) throws IOException {
super(in);
// Validate the codec before doing any allocation.
CryptoStreamUtils.checkCodec(codec);
this.bufferSize = CryptoStreamUtils.checkBufferSize(codec, bufferSize);
this.codec = codec;
// Defensive copies of caller-owned key/IV material.
this.key = key.clone();
this.initIV = iv.clone();
this.iv = iv.clone();
this.streamOffset = streamOffset;
// Record which optimized read paths the wrapped stream supports.
isByteBufferReadable = in instanceof ByteBufferReadable;
isReadableByteChannel = in instanceof ReadableByteChannel;
inBuffer = ByteBuffer.allocateDirect(this.bufferSize);
outBuffer = ByteBuffer.allocateDirect(this.bufferSize);
decryptor = getDecryptor();
// Runs last; depends on the state initialized above.
resetStreamOffset(streamOffset);
}
项目:indexr
文件:IOUtil.java
/**
 * Completely fills {@code buffer} with bytes from {@code reader}. A
 * non-negative {@code offset} seeks there first; a negative offset reads from
 * the current position. Throws {@link IOException} on premature end of stream.
 */
public static void readFully(FSDataInputStream reader, long offset, ByteBuffer buffer) throws IOException {
    if (offset >= 0) {
        reader.seek(offset);
    }
    InputStream wrapped = reader.getWrappedStream();
    if (wrapped instanceof ByteBufferReadable) {
        // Stream can populate the ByteBuffer directly; loop until it is full.
        while (buffer.hasRemaining()) {
            int before = buffer.position();
            int read = reader.read(buffer);
            if (read < 0) {
                throw new IOException("End of stream");
            }
            buffer.position(before + read);
        }
    } else {
        logger.trace("Using read bytes method");
        // Stage through a heap array, then copy into the target buffer.
        byte[] staging = new byte[buffer.remaining()];
        reader.readFully(staging);
        buffer.put(staging);
    }
    Preconditions.checkState(!buffer.hasRemaining());
}
项目:hadoop-2.6.0-cdh5.4.3
文件:CryptoInputStream.java
/**
 * Builds a decrypting wrapper over {@code in} using the given codec, key and
 * IV.
 *
 * @param in underlying encrypted stream
 * @param codec crypto codec providing the decryptor
 * @param bufferSize requested buffer size, normalized by checkBufferSize
 * @param key secret key; defensively cloned
 * @param iv initialization vector; cloned into both {@code initIV} and {@code iv}
 * @param streamOffset offset of {@code in} within the encrypted stream
 * @throws IOException if codec validation or the initial offset reset fails
 */
public CryptoInputStream(InputStream in, CryptoCodec codec,
int bufferSize, byte[] key, byte[] iv, long streamOffset) throws IOException {
super(in);
// Consistency fix: every other variant of this constructor in this file
// validates the codec up front; without it an unsupported codec surfaces
// later with a less obvious failure.
CryptoStreamUtils.checkCodec(codec);
this.bufferSize = CryptoStreamUtils.checkBufferSize(codec, bufferSize);
this.codec = codec;
// Defensive copies of caller-owned key/IV material.
this.key = key.clone();
this.initIV = iv.clone();
this.iv = iv.clone();
this.streamOffset = streamOffset;
// Record which optimized read paths the wrapped stream supports.
isByteBufferReadable = in instanceof ByteBufferReadable;
isReadableByteChannel = in instanceof ReadableByteChannel;
inBuffer = ByteBuffer.allocateDirect(this.bufferSize);
outBuffer = ByteBuffer.allocateDirect(this.bufferSize);
decryptor = getDecryptor();
// Runs last; depends on the state initialized above.
resetStreamOffset(streamOffset);
}
项目:hops
文件:CryptoInputStream.java
/**
 * Builds a decrypting wrapper over {@code in} using the given codec, key and
 * IV; identical to the other CryptoInputStream constructor variants collected
 * in this file.
 *
 * @throws IOException if codec validation or the initial offset reset fails
 */
public CryptoInputStream(InputStream in, CryptoCodec codec,
int bufferSize, byte[] key, byte[] iv, long streamOffset) throws IOException {
super(in);
// Validate the codec before doing any allocation.
CryptoStreamUtils.checkCodec(codec);
this.bufferSize = CryptoStreamUtils.checkBufferSize(codec, bufferSize);
this.codec = codec;
// Defensive copies of caller-owned key/IV material.
this.key = key.clone();
this.initIV = iv.clone();
this.iv = iv.clone();
this.streamOffset = streamOffset;
// Record which optimized read paths the wrapped stream supports.
isByteBufferReadable = in instanceof ByteBufferReadable;
isReadableByteChannel = in instanceof ReadableByteChannel;
inBuffer = ByteBuffer.allocateDirect(this.bufferSize);
outBuffer = ByteBuffer.allocateDirect(this.bufferSize);
decryptor = getDecryptor();
// Runs last; depends on the state initialized above.
resetStreamOffset(streamOffset);
}
项目:hadoop-oss
文件:CryptoStreamsTestBase.java
/**
 * Reads from {@code in} through the ByteBufferReadable interface into
 * {@code buf} starting at {@code bufPos}, and verifies the bytes read match
 * the head of the shared {@code data} fixture. Assumes the stream still has
 * data available (n >= 0) — a read at EOF would fail on array allocation.
 */
private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
    int bufPos) throws Exception {
  buf.position(bufPos);
  int n = ((ByteBufferReadable) in).read(buf);
  // read(ByteBuffer) must advance the buffer position by the bytes read.
  Assert.assertEquals(bufPos + n, buf.position());

  // Copy out what was just read (redundant rewind() removed — position()
  // alone establishes the start of the copy).
  byte[] readData = new byte[n];
  buf.position(bufPos);
  buf.get(readData);

  byte[] expectedData = new byte[n];
  System.arraycopy(data, 0, expectedData, 0, n);
  // Fix: JUnit's contract is (expected, actual); the original passed them
  // swapped, which inverts the failure message.
  Assert.assertArrayEquals(expectedData, readData);
}
项目:QDrill
文件:TextInput.java
/**
 * Creates a reader over {@code input} (which must implement {@link Seekable})
 * that handles newlines transparently. The line-separator bytes come from
 * {@code settings.getNewLineDelimiter()} and must be one or two bytes long;
 * any occurrence is normalized to {@code settings.getNormalizedNewLine()}.
 * Splits ({@code startPos > 0} or a bounded {@code endPos}) are rejected or
 * widened for compressed streams, whose lengths refer to compressed bytes.
 */
public TextInput(TextParsingSettings settings, InputStream input, DrillBuf readBuffer, long startPos, long endPos) {
  final byte[] delimiter = settings.getNewLineDelimiter();
  final byte normalizedNewLine = settings.getNormalizedNewLine();
  Preconditions.checkArgument(delimiter != null && (delimiter.length == 1 || delimiter.length == 2), "Invalid line separator. Expected 1 to 2 characters");
  Preconditions.checkArgument(input instanceof Seekable, "Text input only supports an InputStream that supports Seekable.");
  final boolean compressed = input instanceof CompressionInputStream;
  Preconditions.checkArgument(!compressed || startPos == 0, "Cannot use split on compressed stream.");

  // A split length over compressed data would be the compressed size, so the
  // scan would normally stop early; read to the end of the stream instead.
  this.endPos = (compressed && endPos > 0) ? Long.MAX_VALUE : endPos;

  this.input = input;
  this.seekable = (Seekable) input;
  this.settings = settings;

  if (input instanceof FSDataInputStream) {
    final FSDataInputStream fsStream = (FSDataInputStream) input;
    this.inputFS = fsStream;
    this.bufferReadable = fsStream.getWrappedStream() instanceof ByteBufferReadable;
  } else {
    this.inputFS = null;
    this.bufferReadable = false;
  }

  this.startPos = startPos;
  this.lineSeparator1 = delimiter[0];
  this.lineSeparator2 = (delimiter.length == 2) ? delimiter[1] : NULL_BYTE;
  this.normalizedLineSeparator = normalizedNewLine;

  this.buffer = readBuffer;
  this.bStart = readBuffer.memoryAddress();
  this.bStartMinus1 = this.bStart - 1;
  this.underlyingBuffer = readBuffer.nioBuffer(0, readBuffer.capacity());
}
项目:hadoop
文件:CryptoStreamsTestBase.java
/**
 * Reads from {@code in} through the ByteBufferReadable interface into
 * {@code buf} starting at {@code bufPos}, and verifies the bytes read match
 * the head of the shared {@code data} fixture. Assumes the stream still has
 * data available (n >= 0) — a read at EOF would fail on array allocation.
 */
private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
    int bufPos) throws Exception {
  buf.position(bufPos);
  int n = ((ByteBufferReadable) in).read(buf);
  // read(ByteBuffer) must advance the buffer position by the bytes read.
  Assert.assertEquals(bufPos + n, buf.position());

  // Copy out what was just read (redundant rewind() removed — position()
  // alone establishes the start of the copy).
  byte[] readData = new byte[n];
  buf.position(bufPos);
  buf.get(readData);

  byte[] expectedData = new byte[n];
  System.arraycopy(data, 0, expectedData, 0, n);
  // Fix: JUnit's contract is (expected, actual); the original passed them
  // swapped, which inverts the failure message.
  Assert.assertArrayEquals(expectedData, readData);
}
项目:dremio-oss
文件:TextInput.java
/**
 * Creates a reader over {@code input} (which must implement {@link Seekable})
 * that handles newlines transparently. The line-separator bytes come from
 * {@code settings.getNewLineDelimiter()}; any occurrence is normalized to
 * {@code settings.getNormalizedNewLine()}. Splits are rejected or widened for
 * compressed streams, whose lengths refer to compressed bytes.
 */
public TextInput(TextParsingSettings settings, InputStream input, ArrowBuf readBuffer, long startPos, long endPos) {
  this.lineSeparator = settings.getNewLineDelimiter();
  final byte normalizedNewLine = settings.getNormalizedNewLine();
  Preconditions.checkArgument(input instanceof Seekable, "Text input only supports an InputStream that supports Seekable.");
  final boolean compressed = input instanceof CompressionInputStream;
  Preconditions.checkArgument(!compressed || startPos == 0, "Cannot use split on compressed stream.");

  // A split length over compressed data would be the compressed size, so the
  // scan would normally stop early; read to the end of the stream instead.
  this.endPos = (compressed && endPos > 0) ? Long.MAX_VALUE : endPos;

  this.input = input;
  this.seekable = (Seekable) input;
  this.settings = settings;

  if (input instanceof FSDataInputStream) {
    final FSDataInputStream fsStream = (FSDataInputStream) input;
    this.inputFS = fsStream;
    this.bufferReadable = fsStream.getWrappedStream() instanceof ByteBufferReadable;
  } else {
    this.inputFS = null;
    this.bufferReadable = false;
  }

  this.startPos = startPos;
  this.normalizedLineSeparator = normalizedNewLine;

  this.buffer = readBuffer;
  this.bStart = readBuffer.memoryAddress();
  this.bStartMinus1 = this.bStart - 1;
  this.underlyingBuffer = readBuffer.nioBuffer(0, readBuffer.capacity());
}
项目:aliyun-oss-hadoop-fs
文件:CryptoStreamsTestBase.java
/**
 * Reads from {@code in} through the ByteBufferReadable interface into
 * {@code buf} starting at {@code bufPos}, and verifies the bytes read match
 * the head of the shared {@code data} fixture. Assumes the stream still has
 * data available (n >= 0) — a read at EOF would fail on array allocation.
 */
private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
    int bufPos) throws Exception {
  buf.position(bufPos);
  int n = ((ByteBufferReadable) in).read(buf);
  // read(ByteBuffer) must advance the buffer position by the bytes read.
  Assert.assertEquals(bufPos + n, buf.position());

  // Copy out what was just read (redundant rewind() removed — position()
  // alone establishes the start of the copy).
  byte[] readData = new byte[n];
  buf.position(bufPos);
  buf.get(readData);

  byte[] expectedData = new byte[n];
  System.arraycopy(data, 0, expectedData, 0, n);
  // Fix: JUnit's contract is (expected, actual); the original passed them
  // swapped, which inverts the failure message.
  Assert.assertArrayEquals(expectedData, readData);
}
项目:big-c
文件:CryptoStreamsTestBase.java
/**
 * Reads from {@code in} through the ByteBufferReadable interface into
 * {@code buf} starting at {@code bufPos}, and verifies the bytes read match
 * the head of the shared {@code data} fixture. Assumes the stream still has
 * data available (n >= 0) — a read at EOF would fail on array allocation.
 */
private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
    int bufPos) throws Exception {
  buf.position(bufPos);
  int n = ((ByteBufferReadable) in).read(buf);
  // read(ByteBuffer) must advance the buffer position by the bytes read.
  Assert.assertEquals(bufPos + n, buf.position());

  // Copy out what was just read (redundant rewind() removed — position()
  // alone establishes the start of the copy).
  byte[] readData = new byte[n];
  buf.position(bufPos);
  buf.get(readData);

  byte[] expectedData = new byte[n];
  System.arraycopy(data, 0, expectedData, 0, n);
  // Fix: JUnit's contract is (expected, actual); the original passed them
  // swapped, which inverts the failure message.
  Assert.assertArrayEquals(expectedData, readData);
}
项目:drill
文件:TextInput.java
/**
 * Creates a reader over {@code input} (which must implement {@link Seekable})
 * that handles newlines transparently. The line-separator bytes come from
 * {@code settings.getNewLineDelimiter()}; any occurrence is normalized to
 * {@code settings.getNormalizedNewLine()}. Splits are rejected or widened for
 * compressed streams, whose lengths refer to compressed bytes.
 */
public TextInput(TextParsingSettings settings, InputStream input, DrillBuf readBuffer, long startPos, long endPos) {
  this.lineSeparator = settings.getNewLineDelimiter();
  final byte normalizedNewLine = settings.getNormalizedNewLine();
  Preconditions.checkArgument(input instanceof Seekable, "Text input only supports an InputStream that supports Seekable.");
  final boolean compressed = input instanceof CompressionInputStream;
  Preconditions.checkArgument(!compressed || startPos == 0, "Cannot use split on compressed stream.");

  // A split length over compressed data would be the compressed size, so the
  // scan would normally stop early; read to the end of the stream instead.
  this.endPos = (compressed && endPos > 0) ? Long.MAX_VALUE : endPos;

  this.input = input;
  this.seekable = (Seekable) input;
  this.settings = settings;

  if (input instanceof FSDataInputStream) {
    final FSDataInputStream fsStream = (FSDataInputStream) input;
    this.inputFS = fsStream;
    this.bufferReadable = fsStream.getWrappedStream() instanceof ByteBufferReadable;
  } else {
    this.inputFS = null;
    this.bufferReadable = false;
  }

  this.startPos = startPos;
  this.normalizedLineSeparator = normalizedNewLine;

  this.buffer = readBuffer;
  this.bStart = readBuffer.memoryAddress();
  this.bStartMinus1 = this.bStart - 1;
  this.underlyingBuffer = readBuffer.nioBuffer(0, readBuffer.capacity());
}
项目:hadoop-2.6.0-cdh5.4.3
文件:CryptoStreamsTestBase.java
/**
 * Reads from {@code in} through the ByteBufferReadable interface into
 * {@code buf} starting at {@code bufPos}, and verifies the bytes read match
 * the head of the shared {@code data} fixture. Assumes the stream still has
 * data available (n >= 0) — a read at EOF would fail on array allocation.
 */
private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
    int bufPos) throws Exception {
  buf.position(bufPos);
  int n = ((ByteBufferReadable) in).read(buf);
  // read(ByteBuffer) must advance the buffer position by the bytes read.
  Assert.assertEquals(bufPos + n, buf.position());

  // Copy out what was just read (redundant rewind() removed — position()
  // alone establishes the start of the copy).
  byte[] readData = new byte[n];
  buf.position(bufPos);
  buf.get(readData);

  byte[] expectedData = new byte[n];
  System.arraycopy(data, 0, expectedData, 0, n);
  // Fix: JUnit's contract is (expected, actual); the original passed them
  // swapped, which inverts the failure message.
  Assert.assertArrayEquals(expectedData, readData);
}
项目:hops
文件:CryptoStreamsTestBase.java
/**
 * Reads from {@code in} through the ByteBufferReadable interface into
 * {@code buf} starting at {@code bufPos}, and verifies the bytes read match
 * the head of the shared {@code data} fixture. Assumes the stream still has
 * data available (n >= 0) — a read at EOF would fail on array allocation.
 */
private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
    int bufPos) throws Exception {
  buf.position(bufPos);
  int n = ((ByteBufferReadable) in).read(buf);
  // read(ByteBuffer) must advance the buffer position by the bytes read.
  Assert.assertEquals(bufPos + n, buf.position());

  // Copy out what was just read (redundant rewind() removed — position()
  // alone establishes the start of the copy).
  byte[] readData = new byte[n];
  buf.position(bufPos);
  buf.get(readData);

  byte[] expectedData = new byte[n];
  System.arraycopy(data, 0, expectedData, 0, n);
  // Fix: JUnit's contract is (expected, actual); the original passed them
  // swapped, which inverts the failure message.
  Assert.assertArrayEquals(expectedData, readData);
}
项目:tajo
文件:FSDataInputChannel.java
/**
 * Chooses a read strategy for {@code inputStream}: direct ByteBuffer reads
 * when the wrapped stream supports {@link ByteBufferReadable}, otherwise a
 * {@link java.nio.channels.ReadableByteChannel} wrapper.
 */
public FSDataInputChannel(FSDataInputStream inputStream) throws IOException {
  boolean supportsDirect = inputStream.getWrappedStream() instanceof ByteBufferReadable;
  if (supportsDirect) {
    this.isDirectRead = true;
  } else {
    /* LocalFileSystem, S3 does not support ByteBufferReadable */
    this.channel = Channels.newChannel(inputStream);
  }
  this.inputStream = inputStream;
  // NOTE(review): available() returns an int and may under-report remaining
  // bytes for large streams — confirm this size is acceptable for callers.
  this.size = inputStream.getPos() + inputStream.available();
}
项目:tajo
文件:ByteBufInputChannel.java
/**
 * Keeps a direct {@link ByteBufferReadable} reference when {@code inputStream}
 * supports it; otherwise wraps the stream in a channel.
 */
public ByteBufInputChannel(InputStream inputStream) {
  if (!(inputStream instanceof ByteBufferReadable)) {
    this.channel = Channels.newChannel(inputStream);
  } else {
    this.byteBufferReadable = (ByteBufferReadable) inputStream;
  }
  this.inputStream = inputStream;
}
项目:drill
文件:DirectBufInputStream.java
/**
 * Verifies that {@code in} implements {@link ByteBufferReadable}.
 *
 * @throws UnsupportedOperationException if the stream cannot read into a ByteBuffer
 */
protected void checkStreamSupportsByteBuffer() throws UnsupportedOperationException {
  if (in instanceof ByteBufferReadable) {
    return;
  }
  throw new UnsupportedOperationException("The input stream is not ByteBuffer readable.");
}