public long heapSize() {
  long heapsize = ClassSize.align(ClassSize.OBJECT + 2 * Bytes.SIZEOF_INT +
      (3 + 1) * ClassSize.REFERENCE);
  // Calculating the size of blockKeys
  if (blockKeys != null) {
    // Adding array + references overhead
    heapsize += ClassSize.align(ClassSize.ARRAY +
        blockKeys.length * ClassSize.REFERENCE);
    // Adding bytes
    for (byte[] bs : blockKeys) {
      heapsize += ClassSize.align(ClassSize.ARRAY + bs.length);
    }
  }
  if (blockOffsets != null) {
    heapsize += ClassSize.align(ClassSize.ARRAY +
        blockOffsets.length * Bytes.SIZEOF_LONG);
  }
  if (blockDataSizes != null) {
    heapsize += ClassSize.align(ClassSize.ARRAY +
        blockDataSizes.length * Bytes.SIZEOF_INT);
  }
  return ClassSize.align(heapsize);
}
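// ClassSize.align is used throughout these snippets; it is assumed to round a
// size up to the JVM's 8-byte object alignment boundary. A minimal sketch of
// that behavior (an assumption, not the verbatim implementation):
public static long align(long num) {
  // Round up to the next multiple of 8, the assumed object alignment.
  return (num + 7L) & ~7L;
}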
/**
 * Constructs a new, empty map with the specified initial capacity,
 * load factor, and maximum memory usage.
 *
 * @param initialCapacity the initial capacity
 * @param loadFactor the load factor
 * @param maxMemUsage the maximum total memory usage
 * @throws IllegalArgumentException if the initial capacity is less than one
 * @throws IllegalArgumentException if the initial capacity is greater than
 *         the maximum capacity
 * @throws IllegalArgumentException if the load factor is <= 0
 * @throws IllegalArgumentException if the max memory usage is too small
 *         to support the base overhead
 */
public LruHashMap(int initialCapacity, float loadFactor, long maxMemUsage) {
  if (initialCapacity < 1) {
    throw new IllegalArgumentException("Initial capacity must be > 0");
  }
  if (initialCapacity > MAXIMUM_CAPACITY) {
    throw new IllegalArgumentException("Initial capacity is too large");
  }
  if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
    throw new IllegalArgumentException("Load factor must be > 0");
  }
  if (maxMemUsage <= (OVERHEAD + initialCapacity * ClassSize.REFERENCE)) {
    throw new IllegalArgumentException("Max memory usage too small to " +
        "support base overhead");
  }
  // Find a power of 2 >= initialCapacity
  int capacity = calculateCapacity(initialCapacity);
  this.loadFactor = loadFactor;
  this.threshold = calculateThreshold(capacity, loadFactor);
  this.entries = new Entry[capacity];
  this.memFree = maxMemUsage;
  this.memTotal = maxMemUsage;
  init();
}
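// Neither calculateCapacity nor calculateThreshold is shown above; plausible
// sketches (assumptions, not the verbatim implementations): the smallest
// power of two >= the requested capacity, and the resize threshold as
// capacity * loadFactor.
private static int calculateCapacity(int requested) {
  int capacity = 1;
  while (capacity < requested && capacity < MAXIMUM_CAPACITY) {
    capacity <<= 1; // next power of two
  }
  return capacity;
}

private static int calculateThreshold(int capacity, float loadFactor) {
  return (int) (capacity * loadFactor);
}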
public LruCachedBlock(BlockCacheKey cacheKey, Cacheable buf, long accessTime,
    boolean inMemory) {
  this.cacheKey = cacheKey;
  this.buf = buf;
  this.accessTime = accessTime;
  // We approximate the size of this class by the size of its name string
  // plus the size of its byte buffer plus the overhead associated with all
  // the base classes. We also include the base class sizes in the
  // PER_BLOCK_OVERHEAD variable rather than align()ing them with their
  // buffer lengths. This variable is used elsewhere in unit tests.
  this.size = ClassSize.align(cacheKey.heapSize())
      + ClassSize.align(buf.heapSize()) + PER_BLOCK_OVERHEAD;
  if (inMemory) {
    this.priority = BlockPriority.MEMORY;
  } else {
    this.priority = BlockPriority.SINGLE;
  }
}
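// A sketch of how the PER_BLOCK_OVERHEAD constant referenced above might be
// derived (an assumption; the real definition lives on the class): the fixed
// per-entry cost described in the comment, i.e. object overhead, the three
// references, the two long fields, plus the estimated cost of the name
// string and the byte buffer.
public final static long PER_BLOCK_OVERHEAD = ClassSize.align(
    ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
        + ClassSize.STRING + ClassSize.BYTE_BUFFER);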
@Override
public long heapSize() {
  long size = ClassSize.align(
      ClassSize.OBJECT +
      // Block type, byte buffer and fileContext references
      3 * ClassSize.REFERENCE +
      // On-disk size, uncompressed size, next block's on-disk size,
      // and onDiskDataSize (bytesPerChecksum lives in fileContext)
      4 * Bytes.SIZEOF_INT +
      // This and previous block offset
      2 * Bytes.SIZEOF_LONG +
      // Heap size of the meta object; fileContext is always non-null.
      fileContext.heapSize()
  );
  if (buf != null) {
    // Deep overhead of the byte buffer. Needs to be aligned separately.
    size += ClassSize.align(buf.capacity() + BYTE_BUFFER_HEAP_SIZE);
  }
  return ClassSize.align(size);
}
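// BYTE_BUFFER_HEAP_SIZE above is assumed to cover the shallow cost of the
// ByteBuffer wrapper itself (the deep cost of the backing array is added via
// buf.capacity()). A sketch of how such a constant might be derived; the
// field inventory follows java.nio.Buffer/ByteBuffer and is an assumption,
// not a measured value:
public static final int BYTE_BUFFER_HEAP_SIZE = (int) ClassSize.align(
    ClassSize.OBJECT                // the buffer object header
        + ClassSize.REFERENCE       // reference to the backing byte[]
        + ClassSize.ARRAY           // backing array header
        + 5 * Bytes.SIZEOF_INT      // mark, position, limit, capacity, offset
        + Bytes.SIZEOF_LONG         // Buffer.address
        + 3 * Bytes.SIZEOF_BOOLEAN  // read-only and byte-order flags
);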
/** Checks if the HeapSize calculator is within reason */
@Test
public void testHeapSizeForBlockIndex() throws IOException {
  Class<HFileBlockIndex.BlockIndexReader> cl =
      HFileBlockIndex.BlockIndexReader.class;
  long expected = ClassSize.estimateBase(cl, false);

  HFileBlockIndex.BlockIndexReader bi =
      new HFileBlockIndex.BlockIndexReader(KeyValue.RAW_COMPARATOR, 1);
  long actual = bi.heapSize();

  // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
  // int [] blockDataSizes) are all null they are not going to show up in the
  // HeapSize calculation, so need to remove those array costs from expected.
  expected -= ClassSize.align(3 * ClassSize.ARRAY);

  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}
static CellScanner getSizedCellScanner(final Cell[] cells) {
  // Start the running total at zero; starting at -1 would leave the
  // reported size one byte short.
  int size = 0;
  for (Cell cell : cells) {
    size += CellUtil.estimatedSerializedSizeOf(cell);
  }
  final int totalSize = ClassSize.align(size);
  final CellScanner cellScanner = CellUtil.createCellScanner(cells);
  return new SizedCellScanner() {
    @Override
    public long heapSize() {
      return totalSize;
    }

    @Override
    public Cell current() {
      return cellScanner.current();
    }

    @Override
    public boolean advance() throws IOException {
      return cellScanner.advance();
    }
  };
}
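// Hypothetical usage of the helper above (the row/column literals are made
// up for illustration): build a scanner over one cell and read back the
// precomputed, aligned size hint without re-walking the array.
Cell[] cells = new Cell[] {
    new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("fam"),
        Bytes.toBytes("qual"), Bytes.toBytes("value"))
};
SizedCellScanner scanner = (SizedCellScanner) getSizedCellScanner(cells);
long sizeHint = scanner.heapSize();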
Slab(int blockSize, int numBlocks) {
  buffers = new LinkedBlockingQueue<ByteBuffer>();
  slabs = new ConcurrentLinkedQueue<ByteBuffer>();

  this.blockSize = blockSize;
  this.numBlocks = numBlocks;

  this.heapSize = ClassSize.estimateBase(this.getClass(), false);

  int maxBlocksPerSlab = Integer.MAX_VALUE / blockSize;
  int maxSlabSize = maxBlocksPerSlab * blockSize;

  int numFullSlabs = numBlocks / maxBlocksPerSlab;
  int partialSlabSize = (numBlocks % maxBlocksPerSlab) * blockSize;
  for (int i = 0; i < numFullSlabs; i++) {
    allocateAndSlice(maxSlabSize, blockSize);
  }

  if (partialSlabSize > 0) {
    allocateAndSlice(partialSlabSize, blockSize);
  }
}
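// allocateAndSlice is not shown; a minimal sketch of what it is assumed to
// do: allocate one large direct buffer, retain it for bookkeeping, and carve
// it into fixed-size views handed out through the buffers queue.
private void allocateAndSlice(int size, int sliceSize) {
  ByteBuffer slab = ByteBuffer.allocateDirect(size);
  slabs.add(slab); // keep the parent buffer so it can be tracked/freed
  for (int offset = 0; offset + sliceSize <= size; offset += sliceSize) {
    slab.position(offset);
    slab.limit(offset + sliceSize);
    buffers.add(slab.slice()); // view over [offset, offset + sliceSize)
    slab.clear();              // reset position/limit for the next slice
  }
}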
public CachedBlock(BlockCacheKey cacheKey, Cacheable buf, long accessTime,
    boolean inMemory) {
  this.cacheKey = cacheKey;
  this.buf = buf;
  this.accessTime = accessTime;
  // We approximate the size of this class by the size of its name string
  // plus the size of its byte buffer plus the overhead associated with all
  // the base classes. We also include the base class sizes in the
  // PER_BLOCK_OVERHEAD variable rather than align()ing them with their
  // buffer lengths. This variable is used elsewhere in unit tests.
  this.size = ClassSize.align(cacheKey.heapSize())
      + ClassSize.align(buf.heapSize()) + PER_BLOCK_OVERHEAD;
  if (inMemory) {
    this.priority = BlockPriority.MEMORY;
  } else {
    this.priority = BlockPriority.SINGLE;
  }
}
@Override
public long heapSize() {
  long size = ClassSize.align(
      // Base class size, including object overhead.
      SCHEMA_CONFIGURED_UNALIGNED_HEAP_SIZE +
      // Block type and byte buffer references
      2 * ClassSize.REFERENCE +
      // On-disk size, uncompressed size, next block's on-disk size,
      // bytesPerChecksum, onDiskDataSize and minorVersion
      6 * Bytes.SIZEOF_INT +
      // Checksum type
      1 * Bytes.SIZEOF_BYTE +
      // This and previous block offset
      2 * Bytes.SIZEOF_LONG +
      // "Include memstore timestamp" flag
      Bytes.SIZEOF_BOOLEAN);

  if (buf != null) {
    // Deep overhead of the byte buffer. Needs to be aligned separately.
    size += ClassSize.align(buf.capacity() + BYTE_BUFFER_HEAP_SIZE);
  }
  return ClassSize.align(size);
}
/** Checks if the HeapSize calculator is within reason */
@Test
public void testHeapSizeForBlockIndex() throws IOException {
  Class<HFileBlockIndex.BlockIndexReader> cl =
      HFileBlockIndex.BlockIndexReader.class;
  long expected = ClassSize.estimateBase(cl, false);

  HFileBlockIndex.BlockIndexReader bi =
      new HFileBlockIndex.BlockIndexReader(Bytes.BYTES_RAWCOMPARATOR, 1);
  long actual = bi.heapSize();

  // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
  // int [] blockDataSizes) are all null they are not going to show up in the
  // HeapSize calculation, so need to remove those array costs from expected.
  expected -= ClassSize.align(3 * ClassSize.ARRAY);

  if (expected != actual) {
    ClassSize.estimateBase(cl, true);
    assertEquals(expected, actual);
  }
}
static CellScanner getSizedCellScanner(final Cell[] cells) {
  // Start the running total at zero; starting at -1 would leave the
  // reported size one byte short.
  int size = 0;
  for (Cell cell : cells) {
    size += CellUtil.estimatedSizeOf(cell);
  }
  final int totalSize = ClassSize.align(size);
  final CellScanner cellScanner = CellUtil.createCellScanner(cells);
  return new SizedCellScanner() {
    @Override
    public long heapSize() {
      return totalSize;
    }

    @Override
    public Cell current() {
      return cellScanner.current();
    }

    @Override
    public boolean advance() throws IOException {
      return cellScanner.advance();
    }
  };
}
@Override
protected long calculateHeapSizeForBlockKeys(long heapSize) {
  if (blockKeys != null) {
    heapSize += ClassSize.REFERENCE;
    // Adding array + references overhead
    heapSize += ClassSize.align(ClassSize.ARRAY +
        blockKeys.length * ClassSize.REFERENCE);
    // Adding blockKeys
    for (Cell key : blockKeys) {
      heapSize += ClassSize.align(PrivateCellUtil.estimatedHeapSizeOf(key));
    }
  }
  // Add the comparator and the midkey AtomicReference
  heapSize += 2 * ClassSize.REFERENCE;
  return heapSize;
}
@Override
public long heapSize() {
  // This object, block type and byte buffer references, on-disk and
  // uncompressed sizes, next block's on-disk size, offset and previous
  // offset, byte buffer object, and its byte array. Might also need to add
  // some fields inside the byte buffer.

  // We only add one BYTE_BUFFER_HEAP_SIZE because at any given moment, one
  // of the byte buffers will be null. But we do account for both references.
  long size = ClassSize.align(ClassSize.OBJECT + 2 * ClassSize.REFERENCE
      + 3 * Bytes.SIZEOF_INT + 2 * Bytes.SIZEOF_LONG + BYTE_BUFFER_HEAP_SIZE);

  // If we are on heap, then we add the capacity of buf.
  if (buf != null) {
    size += ClassSize.align(buf.capacity());
  }
  return size;
}
static CellScanner getSizedCellScanner(final Cell[] cells) {
  // Start the running total at zero; starting at -1 would leave the
  // reported size one byte short.
  int size = 0;
  for (Cell cell : cells) {
    size += PrivateCellUtil.estimatedSerializedSizeOf(cell);
  }
  final int totalSize = ClassSize.align(size);
  final CellScanner cellScanner = CellUtil.createCellScanner(cells);
  return new SizedCellScanner() {
    @Override
    public long heapSize() {
      return totalSize;
    }

    @Override
    public Cell current() {
      return cellScanner.current();
    }

    @Override
    public boolean advance() throws IOException {
      return cellScanner.advance();
    }
  };
}
/**
 * Instantiates a new cached block.
 *
 * @param cacheKey the cache key
 * @param buf the buffer holding the block data
 * @param accessTime the time the block was last accessed
 * @param inMemory whether the block should be given in-memory priority
 */
public CachedBlock(BlockCacheKey cacheKey, Cacheable buf, long accessTime,
    boolean inMemory) {
  this.cacheKey = cacheKey;
  this.buf = buf;
  this.accessTime = accessTime;
  // We approximate the size of this class by the size of its name string
  // plus the size of its byte buffer plus the overhead associated with all
  // the base classes. We also include the base class sizes in the
  // PER_BLOCK_OVERHEAD variable rather than align()ing them with their
  // buffer lengths. This variable is used elsewhere in unit tests.
  this.size = ClassSize.align(cacheKey.heapSize())
      + ClassSize.align(buf.heapSize()) + PER_BLOCK_OVERHEAD;
  if (inMemory) {
    this.priority = BlockPriority.MEMORY;
  } else {
    this.priority = BlockPriority.SINGLE;
  }
}