/**
 * Reads up to {@code l} bytes from the blob identified by {@code blobKey},
 * starting at {@code startIndex} within the blob.
 *
 * Fixes over the previous version:
 * - {@code startIndex} was passed as the destination-array offset of
 *   {@link java.io.InputStream#read(byte[], int, int)}, which reads from the
 *   wrong position and over-runs the buffer whenever {@code startIndex > 0};
 *   the stream is now skipped forward instead.
 * - {@code ArrayUtils.subarray(bytes, 0, readSize - 1)} silently dropped the
 *   final byte (the end index is exclusive) and broke on {@code readSize == -1}.
 * - a single {@code read()} call may legally return fewer bytes than
 *   requested; reading now loops until EOF or the buffer is full.
 *
 * @param blobKey    key of the blob to read
 * @param startIndex offset into the blob at which to start reading
 * @param l          maximum number of bytes to return
 * @return the bytes read; shorter than {@code l} if EOF was reached first,
 *         possibly empty on error (errors are logged, not rethrown)
 */
public byte[] fetchData(String blobKey, long startIndex, long l) {
    CountingInputStream inputStream = new CountingInputStream(getInputStream(blobKey));
    byte[] bytes = new byte[(int) l];
    try {
        // Advance the stream to the requested blob offset. skip() may skip
        // fewer bytes than asked, so loop; a non-positive return means EOF.
        long skipped = 0;
        while (skipped < startIndex) {
            long n = inputStream.skip(startIndex - skipped);
            if (n <= 0) {
                break;
            }
            skipped += n;
        }
        // Fill the buffer from offset 0; stop at EOF.
        int total = 0;
        while (total < (int) l) {
            int readSize = inputStream.read(bytes, total, (int) l - total);
            if (readSize < 0) {
                break;
            }
            total += readSize;
        }
        if (total < l) {
            // Trim to the bytes actually read (subarray end is exclusive).
            bytes = ArrayUtils.subarray(bytes, 0, total);
        }
    } catch (IOException e) {
        LOGGER.warn("Failed to read bytes", e);
    } finally {
        try {
            inputStream.close();
        } catch (IOException ignored) {
            LOGGER.warn("Exception while closing inputStream", ignored);
        }
    }
    return bytes;
}
@Test
public void testEmptyWorks() throws IOException {
    // Encode nothing: flushing an untouched encoder must produce zero bytes.
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    CountingOutputStream countingOut = new CountingOutputStream(sink);
    DataOutputStream out = new DataOutputStream(countingOut);
    MessageCodec codec = new MessageCodec();
    Codec.Encoder encoder = codec.getEncoder(out);
    encoder.flush();
    out.close();
    long written = countingOut.getCount();
    assertEquals(0, written);
    // Decoding the empty payload must immediately report end-of-stream
    // without consuming any bytes.
    CountingInputStream countingIn =
        new CountingInputStream(new ByteArrayInputStream(sink.toByteArray()));
    DataInputStream in = new DataInputStream(countingIn);
    Codec.Decoder decoder = codec.getDecoder(in);
    assertFalse(decoder.advance());
    in.close();
    assertEquals(0, countingIn.getCount());
}
@Test public void testOne() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CountingOutputStream cos = new CountingOutputStream(baos); DataOutputStream dos = new DataOutputStream(cos); MessageCodec cmc = new MessageCodec(); Codec.Encoder encoder = cmc.getEncoder(dos); final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); encoder.write(kv); encoder.flush(); dos.close(); long offset = cos.getCount(); CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = cmc.getDecoder(dis); assertTrue(decoder.advance()); // First read should pull in the KV assertFalse(decoder.advance()); // Second read should trip over the end-of-stream marker and return false dis.close(); assertEquals(offset, cis.getCount()); }
@Test
public void testEmptyWorks() throws IOException {
    // An encoder that is flushed without writing anything emits no bytes.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    CountingOutputStream countedOut = new CountingOutputStream(buffer);
    DataOutputStream dataOut = new DataOutputStream(countedOut);
    KeyValueCodec codec = new KeyValueCodec();
    Codec.Encoder encoder = codec.getEncoder(dataOut);
    encoder.flush();
    dataOut.close();
    assertEquals(0, countedOut.getCount());
    // Decoding the resulting empty stream yields no cells and reads nothing.
    CountingInputStream countedIn =
        new CountingInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    DataInputStream dataIn = new DataInputStream(countedIn);
    Codec.Decoder decoder = codec.getDecoder(dataIn);
    assertFalse(decoder.advance());
    dataIn.close();
    assertEquals(0, countedIn.getCount());
}
@Test public void testOne() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CountingOutputStream cos = new CountingOutputStream(baos); DataOutputStream dos = new DataOutputStream(cos); KeyValueCodec kvc = new KeyValueCodec(); Codec.Encoder encoder = kvc.getEncoder(dos); final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); final long length = kv.getLength() + Bytes.SIZEOF_INT; encoder.write(kv); encoder.flush(); dos.close(); long offset = cos.getCount(); assertEquals(length, offset); CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = kvc.getDecoder(dis); assertTrue(decoder.advance()); // First read should pull in the KV // Second read should trip over the end-of-stream marker and return false assertFalse(decoder.advance()); dis.close(); assertEquals(length, cis.getCount()); }
@Test
public void testEmptyWorks() throws IOException {
    // Flushing a CellCodec encoder with no cells written produces no output.
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    CountingOutputStream countingStream = new CountingOutputStream(bytesOut);
    DataOutputStream output = new DataOutputStream(countingStream);
    Codec codec = new CellCodec();
    Codec.Encoder encoder = codec.getEncoder(output);
    encoder.flush();
    output.close();
    assertEquals(0, countingStream.getCount());
    // The matching decoder sees end-of-stream at once and consumes nothing.
    CountingInputStream countingInput =
        new CountingInputStream(new ByteArrayInputStream(bytesOut.toByteArray()));
    DataInputStream input = new DataInputStream(countingInput);
    Codec.Decoder decoder = codec.getDecoder(input);
    assertFalse(decoder.advance());
    input.close();
    assertEquals(0, countingInput.getCount());
}
@Test public void testOne() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CountingOutputStream cos = new CountingOutputStream(baos); DataOutputStream dos = new DataOutputStream(cos); Codec codec = new CellCodec(); Codec.Encoder encoder = codec.getEncoder(dos); final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); kv.setSequenceId(Long.MAX_VALUE); encoder.write(kv); encoder.flush(); dos.close(); long offset = cos.getCount(); CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); // First read should pull in the KV // Second read should trip over the end-of-stream marker and return false assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); }
@Test
public void encodeDecodeTest() throws IOException {
    // Arithmetic-code every symbol, decode them back, and check that the
    // decoder consumed exactly the bytes the encoder produced.
    ArthmeticCoder.SimpleFrequency frequency = new ArthmeticCoder.SimpleFrequency(counts);
    ByteArrayOutputStream encodedBytes = new ByteArrayOutputStream();
    CountingOutputStream countedOut = new CountingOutputStream(encodedBytes);
    ArthmeticCoder.Encoder coder =
        new ArthmeticCoder.Encoder(frequency, new BitWrappedOutputStream(countedOut));
    for (int symbol : symbols) {
        coder.write(symbol);
    }
    coder.seal();
    CountingInputStream countedIn =
        new CountingInputStream(new ByteArrayInputStream(encodedBytes.toByteArray()));
    ArthmeticCoder.Decoder decoder =
        new ArthmeticCoder.Decoder(frequency, new BitWrappedInputStream(countedIn));
    int[] decoded = new int[symbols.length];
    for (int i = 0; i < decoded.length; i++) {
        decoded[i] = decoder.read();
    }
    Assert.assertEquals(countedOut.getCount(), countedIn.getCount());
    Assert.assertArrayEquals(symbols, decoded);
}
/**
 * Decodes {@code bytes} with a clone of {@code coder} and asserts that the
 * decoder consumed exactly the encoded bytes.
 *
 * In the NESTED context one extra trailing byte is appended so the decoder
 * must stop at the value boundary rather than at end-of-stream.
 */
@VisibleForTesting
static <T> T decode(Coder<T> coder, Coder.Context context, byte[] bytes)
    throws CoderException, IOException {
  // Clone to verify the coder survives serialization, like a real pipeline.
  @SuppressWarnings("unchecked")
  Coder<T> deserializedCoder = SerializableUtils.clone(coder);
  byte[] buffer = bytes;
  if (context == Coder.Context.NESTED) {
    // copyOf zero-extends by one slot; overwrite the pad with a sentinel.
    buffer = java.util.Arrays.copyOf(bytes, bytes.length + 1);
    buffer[bytes.length] = 1;
  }
  CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(buffer));
  T value = deserializedCoder.decode(new UnownedInputStream(cis), context);
  assertThat(
      "consumed bytes equal to encoded bytes", cis.getCount(), equalTo((long) bytes.length));
  return value;
}
public AddBlobVertexOperation(DuctileDBTransactionImpl transaction, long vertexId, InputStream blobContent, Set<String> types, Map<String, Object> properties) { super(transaction); try { this.types = Collections.unmodifiableSet(types); this.types.add(GraphStore.BLOB_TYPE_NAME); this.properties = Collections.unmodifiableMap(properties); this.vertex = new DuctileDBCacheVertex(transaction, vertexId, types, properties, new ArrayList<>()); blobFile = File.createTempFile("add-vertex-" + vertex.getId(), ".blob"); blobFile.deleteOnExit(); try (CountingInputStream countingInputStream = new CountingInputStream(blobContent); // HashIdCreatorInputStream hashIdStream = new HashIdCreatorInputStream(countingInputStream); // FileOutputStream outputStream = new FileOutputStream(this.blobFile)) { ByteStreams.copy(hashIdStream, outputStream); hashId = hashIdStream.getHashId(); long size = countingInputStream.getCount(); properties.put(GraphStore.BLOB_HASHID_PROPERTY_NAME, hashId.toString()); properties.put(GraphStore.BLOB_SIZE_PROPERTY_NAME, size); } } catch (IOException e) { throw new DuctileDBTransactionException("Could not create temporary file.", e); } }
private static void interceptClose(final Stream<?> stream, final CountingInputStream in, final long startTime, final Object... args) { final Map<String, String> mdc = Logging.getMDC(); stream.onClose(new Runnable() { @Override public void run() { final Map<String, String> oldMdc = Logging.getMDC(); try { Logging.setMDC(mdc); logRead(in, startTime, args); // closing the stream should not be done, but GZIPFilter seems not to detect // EOF and does not release the underlying stream, causing the connection not // to be released Util.closeQuietly(in); } finally { Logging.setMDC(oldMdc); } } }); }
private static void logRead(final CountingInputStream in, final long startTime, final Object... args) { if (LOGGER.isDebugEnabled()) { boolean eof = false; try { eof = in.read() == -1; } catch (final Throwable ex) { // ignore } final long elapsed = System.currentTimeMillis() - startTime; final StringBuilder builder = new StringBuilder(); builder.append("Http: read complete, "); for (int i = 0; i < args.length; i += 2) { builder.append(args[i]).append(" ").append(args[i + 1]).append(", "); } builder.append(in.getCount()).append(" byte(s), "); if (eof) { builder.append("EOF, "); } builder.append(elapsed).append(" ms"); LOGGER.debug(builder.toString()); } }
public ExampleRecordCursor(List<ExampleColumnHandle> columnHandles, ByteSource byteSource) { this.columnHandles = columnHandles; fieldToColumnIndex = new int[columnHandles.size()]; for (int i = 0; i < columnHandles.size(); i++) { ExampleColumnHandle columnHandle = columnHandles.get(i); fieldToColumnIndex[i] = columnHandle.getOrdinalPosition(); } try (CountingInputStream input = new CountingInputStream(byteSource.openStream())) { lines = byteSource.asCharSource(UTF_8).readLines().iterator(); totalBytes = input.getCount(); } catch (IOException e) { throw Throwables.propagate(e); } }
@Test public void testOne() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CountingOutputStream cos = new CountingOutputStream(baos); DataOutputStream dos = new DataOutputStream(cos); Codec codec = new CellCodec(); Codec.Encoder encoder = codec.getEncoder(dos); final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); kv.setMvccVersion(Long.MAX_VALUE); encoder.write(kv); encoder.flush(); dos.close(); long offset = cos.getCount(); CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); // First read should pull in the KV // Second read should trip over the end-of-stream marker and return false assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); }
public static void main(String[] args) { try { String fileName = args != null && args.length > 0 ? args[0] : "in.hprof"; CountingInputStream in = new CountingInputStream(new FileInputStream(fileName)); ValidatingProcessor processor = new ValidatingProcessor(in); HprofReader reader = new HprofReader(in, processor); while (reader.hasNext()) { reader.next(); } // All data loaded, start to check that it is consistent processor.verifyClasses(); processor.verifyInstances(); } catch (IOException e) { System.err.print("Failed to process file"); e.printStackTrace(System.err); throw new RuntimeException(e); } }
/**
 * Creates a new file at {@code fullFilePath} and, when the request carries a
 * body, stores its content and records the byte count as file info.
 *
 * Fix: the CountingInputStream over the request body was never closed; it is
 * now managed by try-with-resources (whose close may throw IOException, so
 * that is caught alongside ConnectionException).
 *
 * @throws DavException with SC_CONFLICT if the file already exists
 */
public void createFile(final String fullFilePath, final InputContext inputContext) throws DavException {
    if (cassandraDao.getFile(fullFilePath) != null) {
        throw new DavException(DavServletResponse.SC_CONFLICT);
    }
    final String parentDirectory = getParentDirectory(fullFilePath);
    final String fileName = PathUtils.getFileName(fullFilePath);
    final UUID parentId = cassandraDao.getFile(parentDirectory);
    try {
        final UUID fileUUID = cassandraFileDao.createFile(parentId, fileName);
        if (inputContext.hasStream() && inputContext.getContentLength() >= 0) {
            // Count bytes while writing so the stored size matches the body.
            try (CountingInputStream countingInputStream =
                    new CountingInputStream(inputContext.getInputStream())) {
                cassandraFileDao.writeFile(fileUUID, countingInputStream);
                cassandraFileDao.updateFileInfo(fileUUID, countingInputStream.getCount());
            }
        }
    } catch (ConnectionException | java.io.IOException e) {
        throw new RuntimeException(e);
    }
}
@Test public void testOne() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CountingOutputStream cos = new CountingOutputStream(baos); DataOutputStream dos = new DataOutputStream(cos); Codec codec = new CellCodec(); Codec.Encoder encoder = codec.getEncoder(dos); final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")); encoder.write(kv); encoder.flush(); dos.close(); long offset = cos.getCount(); CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray())); DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); // First read should pull in the KV // Second read should trip over the end-of-stream marker and return false assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); }
/**
 * Parses the OSM XML file {@code params[0]} off the UI thread and merges the
 * resulting data set into {@code jtsModel}.
 *
 * Fix: the FileInputStream/CountingInputStream was leaked on every run; it is
 * now closed in a finally block. Parsing failures remain best-effort (logged,
 * model returned unchanged), preserving the original contract.
 */
@Override
protected JTSModel doInBackground(File... params) {
    File f = params[0];
    fileName = f.getName();
    String absPath = f.getAbsolutePath();
    Log.i("BEGIN_PARSING", fileName);
    // Used by progress reporting against the counting stream's position.
    setFileSize(f.length());
    try {
        countingInputStream = new CountingInputStream(new FileInputStream(f));
        OSMDataSet ds = OSMXmlParserInOSMMapBuilder.parseFromInputStream(countingInputStream, this);
        if (isOSMEdit) {
            jtsModel.mergeEditedOSMDataSet(absPath, ds);
        } else {
            jtsModel.addOSMDataSet(absPath, ds);
        }
        loadedOSMFiles.add(absPath);
    } catch (Exception e) {
        // Deliberate best-effort: a bad file must not crash the task.
        e.printStackTrace();
    } finally {
        if (countingInputStream != null) {
            try {
                countingInputStream.close();
            } catch (java.io.IOException ignored) {
                // nothing useful to do if close fails
            }
        }
    }
    return jtsModel;
}