public void init(ByteBlockPool pool, int startIndex, int endIndex) {
  assert endIndex - startIndex >= 0;
  assert startIndex >= 0;
  assert endIndex >= 0;

  this.pool = pool;
  this.endIndex = endIndex;

  level = 0;
  bufferUpto = startIndex / ByteBlockPool.BYTE_BLOCK_SIZE;
  bufferOffset = bufferUpto * ByteBlockPool.BYTE_BLOCK_SIZE;
  buffer = pool.buffers[bufferUpto];
  upto = startIndex & ByteBlockPool.BYTE_BLOCK_MASK;

  final int firstSize = ByteBlockPool.LEVEL_SIZE_ARRAY[0];

  if (startIndex + firstSize >= endIndex) {
    // There is only this one slice to read
    limit = endIndex & ByteBlockPool.BYTE_BLOCK_MASK;
  } else {
    // Stop before the 4-byte forwarding address at the end of this slice
    limit = upto + firstSize - 4;
  }
}
private void readFromBytes(BytesRef bytes) {
  // Read the pruned flag
  this.setIsPruned(bytes.bytes[bytes.offset++] == 1);

  // Read the size of the set
  int size = Bytes.readInt(bytes);

  // Read the terms
  bytesUsed = Counter.newCounter();
  pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(bytesUsed));
  set = new BytesRefHash(pool);

  BytesRef reusable = new BytesRef();
  for (int i = 0; i < size; i++) {
    Bytes.readBytesRef(bytes, reusable);
    set.add(reusable);
  }
}
public TermsHash(final DocumentsWriterPerThread docWriter, final TermsHashConsumer consumer,
    boolean trackAllocations, final TermsHash nextTermsHash) {
  this.docState = docWriter.docState;
  this.consumer = consumer;
  this.trackAllocations = trackAllocations;
  this.nextTermsHash = nextTermsHash;
  this.bytesUsed = trackAllocations ? docWriter.bytesUsed : Counter.newCounter();

  intPool = new IntBlockPool(docWriter.intBlockAllocator);
  bytePool = new ByteBlockPool(docWriter.byteBlockAllocator);

  if (nextTermsHash != null) {
    // We are primary: we own the term byte pool and share it
    // with the chained (secondary) TermsHash
    primary = true;
    termBytePool = bytePool;
    nextTermsHash.termBytePool = bytePool;
  } else {
    primary = false;
  }
}
TermsHash(final DocumentsWriterPerThread docWriter, boolean trackAllocations,
    TermsHash nextTermsHash) {
  this.docState = docWriter.docState;
  this.trackAllocations = trackAllocations;
  this.nextTermsHash = nextTermsHash;
  this.bytesUsed = trackAllocations ? docWriter.bytesUsed : Counter.newCounter();

  intPool = new IntBlockPool(docWriter.intBlockAllocator);
  bytePool = new ByteBlockPool(docWriter.byteBlockAllocator);

  if (nextTermsHash != null) {
    // We are primary: share our byte pool as the term byte pool
    // of the chained (secondary) TermsHash
    termBytePool = bytePool;
    nextTermsHash.termBytePool = bytePool;
  }
}
public SortedSetDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) {
  this.fieldInfo = fieldInfo;
  this.iwBytesUsed = iwBytesUsed;
  hash = new BytesRefHash(
      new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)),
      BytesRefHash.DEFAULT_CAPACITY,
      new DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
  pending = PackedLongValues.packedBuilder(PackedInts.COMPACT);
  pendingCounts = PackedLongValues.deltaPackedBuilder(PackedInts.COMPACT);
  bytesUsed = pending.ramBytesUsed() + pendingCounts.ramBytesUsed();
  iwBytesUsed.addAndGet(bytesUsed);
}
public SortedDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) {
  this.fieldInfo = fieldInfo;
  this.iwBytesUsed = iwBytesUsed;
  hash = new BytesRefHash(
      new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)),
      BytesRefHash.DEFAULT_CAPACITY,
      new DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
  pending = PackedLongValues.deltaPackedBuilder(PackedInts.COMPACT);
  bytesUsed = pending.ramBytesUsed();
  iwBytesUsed.addAndGet(bytesUsed);
}
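// Hedged side sketch (not from the source): the BytesRefHash wired up in the
// two constructors above dedupes terms. add() returns a fresh ord for an
// unseen value and -(ord + 1) for a duplicate, which is the contract the
// doc-values writers rely on. This standalone variant uses the one-argument
// DirectBytesStartArray constructor to stay version-agnostic.
Counter counter = Counter.newCounter();
BytesRefHash terms = new BytesRefHash(
    new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(counter)),
    BytesRefHash.DEFAULT_CAPACITY,
    new BytesRefHash.DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY));
int first = terms.add(new BytesRef("a"));  // >= 0: new entry
int again = terms.add(new BytesRef("a"));  // negative: -(first + 1)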
public void add(int textStart) throws IOException {
  int termID = bytesHash.addByPoolOffset(textStart);
  if (termID >= 0) { // New posting
    // First time we are seeing this token since we last
    // flushed the hash.
    // Init stream slices
    if (numPostingInt + intPool.intUpto > IntBlockPool.INT_BLOCK_SIZE) {
      intPool.nextBuffer();
    }

    if (ByteBlockPool.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt * ByteBlockPool.FIRST_LEVEL_SIZE) {
      bytePool.nextBuffer();
    }

    intUptos = intPool.buffer;
    intUptoStart = intPool.intUpto;
    intPool.intUpto += streamCount;

    postingsArray.intStarts[termID] = intUptoStart + intPool.intOffset;

    for (int i = 0; i < streamCount; i++) {
      final int upto = bytePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
      intUptos[intUptoStart + i] = upto + bytePool.byteOffset;
    }
    postingsArray.byteStarts[termID] = intUptos[intUptoStart];

    newTerm(termID);
  } else {
    termID = (-termID) - 1;
    int intStart = postingsArray.intStarts[termID];
    intUptos = intPool.buffers[intStart >> IntBlockPool.INT_BLOCK_SHIFT];
    intUptoStart = intStart & IntBlockPool.INT_BLOCK_MASK;
    addTerm(termID);
  }
}
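// Side sketch (not from the source): the slice pattern used by add() above,
// shown against ByteBlockPool directly. newSlice() returns the in-buffer start
// of a first-level slice; adding byteOffset converts it to a pool-wide
// address, the same arithmetic the method stores into intUptos.
ByteBlockPool slicePool = new ByteBlockPool(new ByteBlockPool.DirectAllocator());
slicePool.nextBuffer();
int sliceStart = slicePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
int globalStart = sliceStart + slicePool.byteOffset; // pool-wide slice address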
/**
 * Creates a new {@link BytesRefArray} with a counter to track allocated bytes
 */
public BytesRefArray(Counter bytesUsed) {
  this.pool = new ByteBlockPool(
      new ByteBlockPool.DirectTrackingAllocator(bytesUsed));
  pool.nextBuffer();
  bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER
      + RamUsageEstimator.NUM_BYTES_INT);
  this.bytesUsed = bytesUsed;
}
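// Minimal usage sketch for the constructor above (not from the source; assumes
// a Lucene version where BytesRefArray.get takes a BytesRefBuilder spare).
// Appended values are copied into the tracked ByteBlockPool and read back by index.
Counter bytesUsed = Counter.newCounter();
BytesRefArray array = new BytesRefArray(bytesUsed);
array.append(new BytesRef("apple"));
array.append(new BytesRef("banana"));
BytesRefBuilder spare = new BytesRefBuilder();
for (int i = 0; i < array.size(); i++) {
  System.out.println(array.get(spare, i).utf8ToString()); // bytesUsed.get() tracks pool RAM
}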
@Override
public void readFrom(StreamInput in) throws IOException {
  this.setIsPruned(in.readBoolean());
  int size = in.readInt();
  bytesUsed = Counter.newCounter();
  pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(bytesUsed));
  set = new BytesRefHash(pool);
  for (int i = 0; i < size; i++) {
    set.add(in.readBytesRef());
  }
}
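// Hedged counterpart sketch (not in the source): the writeTo() side implied by
// readFrom() above, using the matching Elasticsearch StreamOutput calls.
// isPruned() is assumed to be the getter paired with setIsPruned().
@Override
public void writeTo(StreamOutput out) throws IOException {
  out.writeBoolean(isPruned()); // assumed getter
  out.writeInt(set.size());
  BytesRef spare = new BytesRef();
  for (int i = 0; i < set.size(); i++) {
    set.get(i, spare);
    out.writeBytesRef(spare);
  }
}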
private Allocator randomByteBlockAllocator() {
  if (random().nextBoolean()) {
    return new RecyclingByteBlockAllocator();
  } else {
    return new ByteBlockPool.DirectAllocator();
  }
}
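// Hedged example (not from the source): how a test might use the helper above
// to exercise ByteBlockPool against either allocator implementation.
ByteBlockPool pool = new ByteBlockPool(randomByteBlockAllocator());
pool.nextBuffer(); // the pool materializes its first block on demand
byte[] firstBlock = pool.buffer; // current block, BYTE_BLOCK_SIZE bytes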
public SortedSetDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) {
  this.fieldInfo = fieldInfo;
  this.iwBytesUsed = iwBytesUsed;
  hash = new BytesRefHash(
      new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)),
      BytesRefHash.DEFAULT_CAPACITY,
      new DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
  pending = new AppendingLongBuffer();
  pendingCounts = new AppendingLongBuffer();
  bytesUsed = pending.ramBytesUsed() + pendingCounts.ramBytesUsed();
  iwBytesUsed.addAndGet(bytesUsed);
}
public SortedDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) {
  this.fieldInfo = fieldInfo;
  this.iwBytesUsed = iwBytesUsed;
  hash = new BytesRefHash(
      new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)),
      BytesRefHash.DEFAULT_CAPACITY,
      new DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
  pending = new AppendingLongBuffer();
  bytesUsed = pending.ramBytesUsed();
  iwBytesUsed.addAndGet(bytesUsed);
}
public void add(int textStart) throws IOException {
  int termID = bytesHash.addByPoolOffset(textStart);
  if (termID >= 0) { // New posting
    // First time we are seeing this token since we last
    // flushed the hash.
    // Init stream slices
    if (numPostingInt + intPool.intUpto > IntBlockPool.INT_BLOCK_SIZE) {
      intPool.nextBuffer();
    }

    if (ByteBlockPool.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt * ByteBlockPool.FIRST_LEVEL_SIZE) {
      bytePool.nextBuffer();
    }

    intUptos = intPool.buffer;
    intUptoStart = intPool.intUpto;
    intPool.intUpto += streamCount;

    postingsArray.intStarts[termID] = intUptoStart + intPool.intOffset;

    for (int i = 0; i < streamCount; i++) {
      final int upto = bytePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
      intUptos[intUptoStart + i] = upto + bytePool.byteOffset;
    }
    postingsArray.byteStarts[termID] = intUptos[intUptoStart];

    consumer.newTerm(termID);
  } else {
    termID = (-termID) - 1;
    int intStart = postingsArray.intStarts[termID];
    intUptos = intPool.buffers[intStart >> IntBlockPool.INT_BLOCK_SHIFT];
    intUptoStart = intStart & IntBlockPool.INT_BLOCK_MASK;
    consumer.addTerm(termID);
  }
}
public SortedSetDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) {
  this.fieldInfo = fieldInfo;
  this.iwBytesUsed = iwBytesUsed;
  hash = new BytesRefHash(
      new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)),
      BytesRefHash.DEFAULT_CAPACITY,
      new DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
  pending = new AppendingPackedLongBuffer(PackedInts.COMPACT);
  pendingCounts = new AppendingDeltaPackedLongBuffer(PackedInts.COMPACT);
  bytesUsed = pending.ramBytesUsed() + pendingCounts.ramBytesUsed();
  iwBytesUsed.addAndGet(bytesUsed);
}
public SortedDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) {
  this.fieldInfo = fieldInfo;
  this.iwBytesUsed = iwBytesUsed;
  hash = new BytesRefHash(
      new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)),
      BytesRefHash.DEFAULT_CAPACITY,
      new DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
  pending = new AppendingDeltaPackedLongBuffer(PackedInts.COMPACT);
  bytesUsed = pending.ramBytesUsed();
  iwBytesUsed.addAndGet(bytesUsed);
}