public void testSegmentRefresh_duplicate() throws ExecutionException {
  LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder().concurrencyLevel(1));
  Segment<Object, Object> segment = map.segments[0];
  Object key = new Object();
  int hash = map.hash(key);
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  int index = hash & (table.length() - 1);

  // already loading
  DummyEntry<Object, Object> entry = DummyEntry.create(key, hash, null);
  DummyValueReference<Object, Object> valueRef = DummyValueReference.create(null);
  valueRef.setLoading(true);
  entry.setValueReference(valueRef);
  table.set(index, entry);
  assertNull(segment.refresh(key, hash, identityLoader(), false));
}
public void testRemovalListener_collected() {
  QueuingRemovalListener<Object, Object> listener = queuingRemovalListener();
  LocalCache<Object, Object> map =
      makeLocalCache(
          createCacheBuilder().concurrencyLevel(1).softValues().removalListener(listener));
  Segment<Object, Object> segment = map.segments[0];
  assertTrue(listener.isEmpty());

  Object one = new Object();
  Object two = new Object();
  Object three = new Object();

  map.put(one, two);
  map.put(two, three);
  assertTrue(listener.isEmpty());

  int hash = map.hash(one);
  ReferenceEntry<Object, Object> entry = segment.getEntry(one, hash);
  map.reclaimValue(entry.getValueReference());
  assertNotified(listener, one, two, RemovalCause.COLLECTED);

  assertTrue(listener.isEmpty());
}
public void testPutCausesExpansion() {
  for (int count = 1; count <= 100; count++) {
    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1));
    Segment<Object, Object> segment = map.segments[0];
    assertEquals(1, segment.table.length());

    for (int i = 0; i < count; i++) {
      Object key = new Object();
      Object value = new Object();
      segment.put(key, key.hashCode(), value, true);
    }
    assertEquals(count, segment.count);
    assertTrue(count <= segment.threshold);
    assertTrue(count <= (segment.table.length() * 3 / 4));
    assertTrue(count > (segment.table.length() * 3 / 8));
  }
}
public void testDrainRecencyQueueOnWrite() {
  for (CacheBuilder<Object, Object> builder : allEvictingMakers()) {
    LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
    Segment<Object, Object> segment = map.segments[0];

    if (segment.recencyQueue != DISCARDING_QUEUE) {
      Object keyOne = new Object();
      Object valueOne = new Object();
      Object keyTwo = new Object();
      Object valueTwo = new Object();

      map.put(keyOne, valueOne);
      assertTrue(segment.recencyQueue.isEmpty());

      for (int i = 0; i < DRAIN_THRESHOLD / 2; i++) {
        map.get(keyOne);
      }
      assertFalse(segment.recencyQueue.isEmpty());

      map.put(keyTwo, valueTwo);
      assertTrue(segment.recencyQueue.isEmpty());
    }
  }
}
/** Lookups on the map view shouldn't impact the recency queue. */
public void testAsMapRecency() {
  CacheBuilder<Object, Object> builder =
      createCacheBuilder().concurrencyLevel(1).maximumSize(SMALL_MAX_SIZE);
  LocalLoadingCache<Object, Object> cache = makeCache(builder, identityLoader());
  Segment<Object, Object> segment = cache.localCache.segments[0];
  ConcurrentMap<Object, Object> map = cache.asMap();

  Object one = new Object();
  assertSame(one, cache.getUnchecked(one));
  assertTrue(segment.recencyQueue.isEmpty());
  assertSame(one, map.get(one));
  assertSame(one, segment.recencyQueue.peek().getKey());
  assertSame(one, cache.getUnchecked(one));
  assertFalse(segment.recencyQueue.isEmpty());
}
static void checkValidState(LocalCache<?, ?> cchm) {
  for (Segment<?, ?> segment : cchm.segments) {
    segment.cleanUp();
    assertFalse(segment.isLocked());
    Map<?, ?> table = segmentTable(segment);
    // cleanup and then check count after we have a strong reference to all entries
    segment.cleanUp();
    // under high memory pressure keys/values may be nulled out but not yet enqueued
    assertThat(table.size()).isAtMost(segment.count);
    for (Entry<?, ?> entry : table.entrySet()) {
      assertNotNull(entry.getKey());
      assertNotNull(entry.getValue());
      assertSame(entry.getValue(), cchm.get(entry.getKey()));
    }
  }
  checkEviction(cchm);
  checkExpiration(cchm);
}
/**
 * Assuming the given cache has maximum size {@code maxSize}, this method populates the cache (by
 * getting a bunch of different keys), then makes sure all the items in the cache are also in the
 * eviction queue. It will invoke the given {@code operation} on the first element in the
 * eviction queue, and then reverify that all items in the cache are in the eviction queue, and
 * verify that the head of the eviction queue has changed as a result of the operation.
 */
static void checkRecency(
    LoadingCache<Integer, Integer> cache,
    int maxSize,
    Receiver<ReferenceEntry<Integer, Integer>> operation) {
  checkNotNull(operation);
  if (hasLocalCache(cache)) {
    warmUp(cache, 0, 2 * maxSize);

    LocalCache<Integer, Integer> cchm = toLocalCache(cache);
    Segment<?, ?> segment = cchm.segments[0];
    drainRecencyQueue(segment);
    assertEquals(maxSize, accessQueueSize(cache));
    assertEquals(maxSize, cache.size());

    ReferenceEntry<?, ?> originalHead = segment.accessQueue.peek();
    @SuppressWarnings("unchecked")
    ReferenceEntry<Integer, Integer> entry = (ReferenceEntry) originalHead;
    operation.accept(entry);
    drainRecencyQueue(segment);

    assertNotSame(originalHead, segment.accessQueue.peek());
    assertEquals(cache.size(), accessQueueSize(cache));
  }
}
static void checkEmpty(ConcurrentMap<?, ?> map) {
  checkEmpty(map.keySet());
  checkEmpty(map.values());
  checkEmpty(map.entrySet());
  assertEquals(ImmutableMap.of(), map);
  assertEquals(ImmutableMap.of().hashCode(), map.hashCode());
  assertEquals(ImmutableMap.of().toString(), map.toString());

  if (map instanceof LocalCache) {
    LocalCache<?, ?> cchm = (LocalCache<?, ?>) map;

    checkValidState(cchm);
    assertTrue(cchm.isEmpty());
    assertEquals(0, cchm.size());
    for (LocalCache.Segment<?, ?> segment : cchm.segments) {
      assertEquals(0, segment.count);
      assertEquals(0, segmentSize(segment));
      assertTrue(segment.writeQueue.isEmpty());
      assertTrue(segment.accessQueue.isEmpty());
    }
  }
}
public void testComputePartiallyCollectedKey() throws ExecutionException {
  CacheBuilder<Object, Object> builder = createCacheBuilder().concurrencyLevel(1);
  CountingLoader loader = new CountingLoader();
  LocalCache<Object, Object> map = makeLocalCache(builder);
  Segment<Object, Object> segment = map.segments[0];
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  assertEquals(0, loader.getCount());

  Object key = new Object();
  int hash = map.hash(key);
  Object value = new Object();
  int index = hash & (table.length() - 1);

  DummyEntry<Object, Object> entry = DummyEntry.create(key, hash, null);
  DummyValueReference<Object, Object> valueRef = DummyValueReference.create(value);
  entry.setValueReference(valueRef);
  table.set(index, entry);
  segment.count++;

  assertSame(value, map.get(key, loader));
  assertEquals(0, loader.getCount());
  assertEquals(1, segment.count);

  entry.clearKey();
  assertNotSame(value, map.get(key, loader));
  assertEquals(1, loader.getCount());
  assertEquals(2, segment.count);
}
public void testComputePartiallyCollectedValue() throws ExecutionException {
  CacheBuilder<Object, Object> builder = createCacheBuilder().concurrencyLevel(1);
  CountingLoader loader = new CountingLoader();
  LocalCache<Object, Object> map = makeLocalCache(builder);
  Segment<Object, Object> segment = map.segments[0];
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  assertEquals(0, loader.getCount());

  Object key = new Object();
  int hash = map.hash(key);
  Object value = new Object();
  int index = hash & (table.length() - 1);

  DummyEntry<Object, Object> entry = DummyEntry.create(key, hash, null);
  DummyValueReference<Object, Object> valueRef = DummyValueReference.create(value);
  entry.setValueReference(valueRef);
  table.set(index, entry);
  segment.count++;

  assertSame(value, map.get(key, loader));
  assertEquals(0, loader.getCount());
  assertEquals(1, segment.count);

  valueRef.clear();
  assertNotSame(value, map.get(key, loader));
  assertEquals(1, loader.getCount());
  assertEquals(1, segment.count);
}
public void testSegmentReplace() {
  LocalCache<Object, Object> map =
      makeLocalCache(createCacheBuilder().concurrencyLevel(1).expireAfterAccess(99999, SECONDS));
  Segment<Object, Object> segment = map.segments[0];
  // TODO(fry): check recency ordering

  Object key = new Object();
  int hash = map.hash(key);
  Object oldValue = new Object();
  Object newValue = new Object();
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  int index = hash & (table.length() - 1);

  DummyEntry<Object, Object> entry = DummyEntry.create(key, hash, null);
  DummyValueReference<Object, Object> oldValueRef = DummyValueReference.create(oldValue);
  entry.setValueReference(oldValueRef);

  // no entry
  assertNull(segment.replace(key, hash, newValue));
  assertEquals(0, segment.count);

  // same key
  table.set(index, entry);
  segment.count++;
  assertEquals(1, segment.count);
  assertSame(oldValue, segment.get(key, hash));
  assertSame(oldValue, segment.replace(key, hash, newValue));
  assertEquals(1, segment.count);
  assertSame(newValue, segment.get(key, hash));

  // cleared
  entry.setValueReference(oldValueRef);
  assertSame(oldValue, segment.get(key, hash));
  oldValueRef.clear();
  assertNull(segment.replace(key, hash, newValue));
  assertEquals(0, segment.count);
  assertNull(segment.get(key, hash));
}
public void testSegmentPut() {
  LocalCache<Object, Object> map =
      makeLocalCache(createCacheBuilder().concurrencyLevel(1).expireAfterAccess(99999, SECONDS));
  Segment<Object, Object> segment = map.segments[0];
  // TODO(fry): check recency ordering

  Object key = new Object();
  int hash = map.hash(key);
  Object oldValue = new Object();
  Object newValue = new Object();

  // no entry
  assertEquals(0, segment.count);
  assertNull(segment.put(key, hash, oldValue, false));
  assertEquals(1, segment.count);

  // same key
  assertSame(oldValue, segment.put(key, hash, newValue, false));
  assertEquals(1, segment.count);
  assertSame(newValue, segment.get(key, hash));

  // cleared
  ReferenceEntry<Object, Object> entry = segment.getEntry(key, hash);
  DummyValueReference<Object, Object> oldValueRef = DummyValueReference.create(oldValue);
  entry.setValueReference(oldValueRef);
  assertSame(oldValue, segment.get(key, hash));
  oldValueRef.clear();
  assertNull(segment.put(key, hash, newValue, false));
  assertEquals(1, segment.count);
  assertSame(newValue, segment.get(key, hash));
}
public void testSegmentPutIfAbsent() {
  LocalCache<Object, Object> map =
      makeLocalCache(createCacheBuilder().concurrencyLevel(1).expireAfterAccess(99999, SECONDS));
  Segment<Object, Object> segment = map.segments[0];
  // TODO(fry): check recency ordering

  Object key = new Object();
  int hash = map.hash(key);
  Object oldValue = new Object();
  Object newValue = new Object();

  // no entry
  assertEquals(0, segment.count);
  assertNull(segment.put(key, hash, oldValue, true));
  assertEquals(1, segment.count);

  // same key
  assertSame(oldValue, segment.put(key, hash, newValue, true));
  assertEquals(1, segment.count);
  assertSame(oldValue, segment.get(key, hash));

  // cleared
  ReferenceEntry<Object, Object> entry = segment.getEntry(key, hash);
  DummyValueReference<Object, Object> oldValueRef = DummyValueReference.create(oldValue);
  entry.setValueReference(oldValueRef);
  assertSame(oldValue, segment.get(key, hash));
  oldValueRef.clear();
  assertNull(segment.put(key, hash, newValue, true));
  assertEquals(1, segment.count);
  assertSame(newValue, segment.get(key, hash));
}
public void testSegmentPut_expand() {
  LocalCache<Object, Object> map =
      makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1));
  Segment<Object, Object> segment = map.segments[0];
  assertEquals(1, segment.table.length());

  int count = 1024;
  for (int i = 0; i < count; i++) {
    Object key = new Object();
    Object value = new Object();
    int hash = map.hash(key);
    assertNull(segment.put(key, hash, value, false));
    assertTrue(segment.table.length() > i);
  }
}
public void testSegmentRemove() {
  LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder().concurrencyLevel(1));
  Segment<Object, Object> segment = map.segments[0];

  Object key = new Object();
  int hash = map.hash(key);
  Object oldValue = new Object();
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  int index = hash & (table.length() - 1);

  DummyEntry<Object, Object> entry = DummyEntry.create(key, hash, null);
  DummyValueReference<Object, Object> oldValueRef = DummyValueReference.create(oldValue);
  entry.setValueReference(oldValueRef);

  // no entry
  assertEquals(0, segment.count);
  assertNull(segment.remove(key, hash));
  assertEquals(0, segment.count);

  // same key
  table.set(index, entry);
  segment.count++;
  assertEquals(1, segment.count);
  assertSame(oldValue, segment.get(key, hash));
  assertSame(oldValue, segment.remove(key, hash));
  assertEquals(0, segment.count);
  assertNull(segment.get(key, hash));

  // cleared
  table.set(index, entry);
  segment.count++;
  assertEquals(1, segment.count);
  assertSame(oldValue, segment.get(key, hash));
  oldValueRef.clear();
  assertNull(segment.remove(key, hash));
  assertEquals(0, segment.count);
  assertNull(segment.get(key, hash));
}
static void expireEntries(Segment<?, ?> segment, long now) {
  segment.lock();
  try {
    segment.expireEntries(now);
    segment.cleanUp();
  } finally {
    segment.unlock();
  }
}
public void testExpand() {
  LocalCache<Object, Object> map =
      makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1));
  Segment<Object, Object> segment = map.segments[0];
  assertEquals(1, segment.table.length());

  // manually add elements to avoid expansion
  int originalCount = 1024;
  ReferenceEntry<Object, Object> entry = null;
  for (int i = 0; i < originalCount; i++) {
    Object key = new Object();
    Object value = new Object();
    int hash = map.hash(key);
    // chain all entries together as we only have a single bucket
    entry = map.newEntry(key, hash, entry);
    ValueReference<Object, Object> valueRef = map.newValueReference(entry, value, 1);
    entry.setValueReference(valueRef);
  }
  segment.table.set(0, entry);
  segment.count = originalCount;
  ImmutableMap<Object, Object> originalMap = ImmutableMap.copyOf(map);
  assertEquals(originalCount, originalMap.size());
  assertEquals(originalMap, map);

  for (int i = 1; i <= originalCount * 2; i *= 2) {
    if (i > 1) {
      segment.expand();
    }
    assertEquals(i, segment.table.length());
    assertEquals(originalCount, countLiveEntries(map, 0));
    assertEquals(originalCount, segment.count);
    assertEquals(originalMap, map);
  }
}
private static <K, V> int countLiveEntries(LocalCache<K, V> map, long now) {
  int result = 0;
  for (Segment<K, V> segment : map.segments) {
    AtomicReferenceArray<ReferenceEntry<K, V>> table = segment.table;
    for (int i = 0; i < table.length(); i++) {
      for (ReferenceEntry<K, V> e = table.get(i); e != null; e = e.getNext()) {
        if (map.isLive(e, now)) {
          result++;
        }
      }
    }
  }
  return result;
}
public void testClear() {
  LocalCache<Object, Object> map =
      makeLocalCache(
          createCacheBuilder()
              .concurrencyLevel(1)
              .initialCapacity(1)
              .maximumSize(SMALL_MAX_SIZE)
              .expireAfterWrite(99999, SECONDS));
  Segment<Object, Object> segment = map.segments[0];
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  assertEquals(1, table.length());

  Object key = new Object();
  Object value = new Object();
  int hash = map.hash(key);
  DummyEntry<Object, Object> entry = createDummyEntry(key, hash, value, null);
  segment.recordWrite(entry, 1, map.ticker.read());
  segment.table.set(0, entry);
  segment.readCount.incrementAndGet();
  segment.count = 1;
  segment.totalWeight = 1;

  assertSame(entry, table.get(0));
  assertSame(entry, segment.accessQueue.peek());
  assertSame(entry, segment.writeQueue.peek());

  segment.clear();
  assertNull(table.get(0));
  assertTrue(segment.accessQueue.isEmpty());
  assertTrue(segment.writeQueue.isEmpty());
  assertEquals(0, segment.readCount.get());
  assertEquals(0, segment.count);
  assertEquals(0, segment.totalWeight);
}
public void testClear_notification() {
  QueuingRemovalListener<Object, Object> listener = queuingRemovalListener();
  LocalCache<Object, Object> map =
      makeLocalCache(
          createCacheBuilder()
              .concurrencyLevel(1)
              .initialCapacity(1)
              .maximumSize(SMALL_MAX_SIZE)
              .expireAfterWrite(99999, SECONDS)
              .removalListener(listener));
  Segment<Object, Object> segment = map.segments[0];
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  assertEquals(1, table.length());

  Object key = new Object();
  Object value = new Object();
  int hash = map.hash(key);
  DummyEntry<Object, Object> entry = createDummyEntry(key, hash, value, null);
  segment.recordWrite(entry, 1, map.ticker.read());
  segment.table.set(0, entry);
  segment.readCount.incrementAndGet();
  segment.count = 1;
  segment.totalWeight = 1;

  assertSame(entry, table.get(0));
  assertSame(entry, segment.accessQueue.peek());
  assertSame(entry, segment.writeQueue.peek());

  segment.clear();
  assertNull(table.get(0));
  assertTrue(segment.accessQueue.isEmpty());
  assertTrue(segment.writeQueue.isEmpty());
  assertEquals(0, segment.readCount.get());
  assertEquals(0, segment.count);
  assertEquals(0, segment.totalWeight);
  assertNotified(listener, key, value, RemovalCause.EXPLICIT);
}
public void testRemoveEntry() {
  LocalCache<Object, Object> map =
      makeLocalCache(
          createCacheBuilder()
              .concurrencyLevel(1)
              .initialCapacity(1)
              .maximumSize(SMALL_MAX_SIZE)
              .expireAfterWrite(99999, SECONDS)
              .removalListener(countingRemovalListener()));
  Segment<Object, Object> segment = map.segments[0];
  AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
  assertEquals(1, table.length());

  Object key = new Object();
  Object value = new Object();
  int hash = map.hash(key);
  DummyEntry<Object, Object> entry = createDummyEntry(key, hash, value, null);

  // remove absent
  assertFalse(segment.removeEntry(entry, hash, RemovalCause.COLLECTED));

  // remove live
  segment.recordWrite(entry, 1, map.ticker.read());
  table.set(0, entry);
  segment.count = 1;
  assertTrue(segment.removeEntry(entry, hash, RemovalCause.COLLECTED));
  assertNotificationEnqueued(map, key, value, hash);
  assertTrue(map.removalNotificationQueue.isEmpty());
  assertFalse(segment.accessQueue.contains(entry));
  assertFalse(segment.writeQueue.contains(entry));
  assertEquals(0, segment.count);
  assertNull(table.get(0));
}
static <K, V> void checkAndDrainRecencyQueue(
    LocalCache<K, V> map, Segment<K, V> segment, List<ReferenceEntry<K, V>> reads) {
  if (map.evictsBySize() || map.expiresAfterAccess()) {
    assertSameEntries(reads, ImmutableList.copyOf(segment.recencyQueue));
  }
  segment.drainRecencyQueue();
}
static <K, V> void checkEvictionQueues(
    LocalCache<K, V> map,
    Segment<K, V> segment,
    List<ReferenceEntry<K, V>> readOrder,
    List<ReferenceEntry<K, V>> writeOrder) {
  if (map.evictsBySize() || map.expiresAfterAccess()) {
    assertSameEntries(readOrder, ImmutableList.copyOf(segment.accessQueue));
  }
  if (map.expiresAfterWrite()) {
    assertSameEntries(writeOrder, ImmutableList.copyOf(segment.writeQueue));
  }
}
public void testExpireAfterWrite() {
  FakeTicker ticker = new FakeTicker();
  LocalCache<Object, Object> map =
      makeLocalCache(
          createCacheBuilder()
              .concurrencyLevel(1)
              .ticker(ticker)
              .expireAfterWrite(2, TimeUnit.NANOSECONDS));
  Segment<Object, Object> segment = map.segments[0];

  Object key = new Object();
  Object value = new Object();
  map.put(key, value);
  ReferenceEntry<Object, Object> entry = map.getEntry(key);
  assertTrue(map.isLive(entry, ticker.read()));

  segment.writeQueue.add(entry);
  assertSame(value, map.get(key));
  assertSame(entry, segment.writeQueue.peek());
  assertEquals(1, segment.writeQueue.size());

  segment.recordRead(entry, ticker.read());
  segment.expireEntries(ticker.read());
  assertSame(value, map.get(key));
  assertSame(entry, segment.writeQueue.peek());
  assertEquals(1, segment.writeQueue.size());

  ticker.advance(1);
  segment.recordRead(entry, ticker.read());
  segment.expireEntries(ticker.read());
  assertSame(value, map.get(key));
  assertSame(entry, segment.writeQueue.peek());
  assertEquals(1, segment.writeQueue.size());

  ticker.advance(1);
  assertNull(map.get(key));
  segment.expireEntries(ticker.read());
  assertNull(map.get(key));
  assertTrue(segment.writeQueue.isEmpty());
}
public void testEvictEntries() {
  int maxSize = 10;
  LocalCache<Object, Object> map =
      makeLocalCache(createCacheBuilder().concurrencyLevel(1).maximumSize(maxSize));
  Segment<Object, Object> segment = map.segments[0];

  // manually add elements to avoid eviction
  int originalCount = 1024;
  ReferenceEntry<Object, Object> entry = null;
  LinkedHashMap<Object, Object> originalMap = Maps.newLinkedHashMap();
  for (int i = 0; i < originalCount; i++) {
    Object key = new Object();
    Object value = new Object();
    AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
    int hash = map.hash(key);
    int index = hash & (table.length() - 1);
    ReferenceEntry<Object, Object> first = table.get(index);
    entry = map.newEntry(key, hash, first);
    ValueReference<Object, Object> valueRef = map.newValueReference(entry, value, 1);
    entry.setValueReference(valueRef);
    segment.recordWrite(entry, 1, map.ticker.read());
    table.set(index, entry);
    originalMap.put(key, value);
  }
  segment.count = originalCount;
  segment.totalWeight = originalCount;
  assertEquals(originalCount, map.size());
  assertEquals(originalMap, map);

  Iterator<Object> it = originalMap.keySet().iterator();
  for (int i = 0; i < originalCount - maxSize; i++) {
    it.next();
    it.remove();
  }
  segment.evictEntries(entry);
  assertEquals(maxSize, map.size());
  assertEquals(originalMap, map);
}
public void testDrainKeyReferenceQueueOnWrite() {
  for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
    LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
    if (map.usesKeyReferences()) {
      Segment<Object, Object> segment = map.segments[0];

      Object keyOne = new Object();
      int hashOne = map.hash(keyOne);
      Object valueOne = new Object();
      Object keyTwo = new Object();
      Object valueTwo = new Object();

      map.put(keyOne, valueOne);
      ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);

      @SuppressWarnings("unchecked")
      Reference<Object> reference = (Reference) entry;
      reference.enqueue();

      map.put(keyTwo, valueTwo);
      assertFalse(map.containsKey(keyOne));
      assertFalse(map.containsValue(valueOne));
      assertNull(map.get(keyOne));
      assertEquals(1, map.size());
      assertNull(segment.keyReferenceQueue.poll());
    }
  }
}
public void testDrainValueReferenceQueueOnWrite() {
  for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
    LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
    if (map.usesValueReferences()) {
      Segment<Object, Object> segment = map.segments[0];

      Object keyOne = new Object();
      int hashOne = map.hash(keyOne);
      Object valueOne = new Object();
      Object keyTwo = new Object();
      Object valueTwo = new Object();

      map.put(keyOne, valueOne);
      ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);
      ValueReference<Object, Object> valueReference = entry.getValueReference();

      @SuppressWarnings("unchecked")
      Reference<Object> reference = (Reference) valueReference;
      reference.enqueue();

      map.put(keyTwo, valueTwo);
      assertFalse(map.containsKey(keyOne));
      assertFalse(map.containsValue(valueOne));
      assertNull(map.get(keyOne));
      assertEquals(1, map.size());
      assertNull(segment.valueReferenceQueue.poll());
    }
  }
}
public void testDrainKeyReferenceQueueOnRead() {
  for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
    LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
    if (map.usesKeyReferences()) {
      Segment<Object, Object> segment = map.segments[0];

      Object keyOne = new Object();
      int hashOne = map.hash(keyOne);
      Object valueOne = new Object();
      Object keyTwo = new Object();

      map.put(keyOne, valueOne);
      ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);

      @SuppressWarnings("unchecked")
      Reference<Object> reference = (Reference) entry;
      reference.enqueue();

      for (int i = 0; i < SMALL_MAX_SIZE; i++) {
        map.get(keyTwo);
      }
      assertFalse(map.containsKey(keyOne));
      assertFalse(map.containsValue(valueOne));
      assertNull(map.get(keyOne));
      assertEquals(0, map.size());
      assertNull(segment.keyReferenceQueue.poll());
    }
  }
}
public void testDrainValueReferenceQueueOnRead() {
  for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
    LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
    if (map.usesValueReferences()) {
      Segment<Object, Object> segment = map.segments[0];

      Object keyOne = new Object();
      int hashOne = map.hash(keyOne);
      Object valueOne = new Object();
      Object keyTwo = new Object();

      map.put(keyOne, valueOne);
      ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);
      ValueReference<Object, Object> valueReference = entry.getValueReference();

      @SuppressWarnings("unchecked")
      Reference<Object> reference = (Reference) valueReference;
      reference.enqueue();

      for (int i = 0; i < SMALL_MAX_SIZE; i++) {
        map.get(keyTwo);
      }
      assertFalse(map.containsKey(keyOne));
      assertFalse(map.containsValue(valueOne));
      assertNull(map.get(keyOne));
      assertEquals(0, map.size());
      assertNull(segment.valueReferenceQueue.poll());
    }
  }
}
/**
 * Forces the segment containing the given {@code key} to expand (see {@link Segment#expand()}).
 */
static <K, V> void forceExpandSegment(Cache<K, V> cache, K key) {
  checkNotNull(cache);
  checkNotNull(key);
  LocalCache<K, V> map = toLocalCache(cache);
  int hash = map.hash(key);
  Segment<K, V> segment = map.segmentFor(hash);
  segment.expand();
}
static void drainRecencyQueues(Cache<?, ?> cache) {
  if (hasLocalCache(cache)) {
    LocalCache<?, ?> map = toLocalCache(cache);
    for (Segment<?, ?> segment : map.segments) {
      drainRecencyQueue(segment);
    }
  }
}
static void drainRecencyQueue(Segment<?, ?> segment) {
  segment.lock();
  try {
    segment.cleanUp();
  } finally {
    segment.unlock();
  }
}
static void drainReferenceQueue(LocalCache.Segment<?, ?> segment) {
  segment.lock();
  try {
    segment.drainReferenceQueues();
  } finally {
    segment.unlock();
  }
}
static long getTotalSegmentSize(Cache<?, ?> cache) {
  LocalCache<?, ?> map = toLocalCache(cache);
  // accumulate as long to match maxSegmentWeight and avoid lossy narrowing
  long totalSize = 0;
  for (Segment<?, ?> segment : map.segments) {
    totalSize += segment.maxSegmentWeight;
  }
  return totalSize;
}