public ModelCache(int size) {
    cache = new LruCache<ModelKey<A>, B>(size) {
        @Override
        protected void onItemEvicted(ModelKey<A> key, B item) {
            // Release the evicted entry's key so it can be reused.
            key.release();
        }
    };
}
public ModelCache(long size) {
    cache = new LruCache<ModelKey<A>, B>(size) {
        @Override
        protected void onItemEvicted(ModelKey<A> key, B item) {
            key.release();
        }
    };
}
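// Hedged sketch, not Glide code: a minimal stand-in for the
// LruCache-with-eviction-callback pattern used by the ModelCache constructors
// above. SimpleLruCache, EvictionDemo, and the demo values are illustrative
// assumptions; the real LruCache bounds total size rather than entry count.
import java.util.LinkedHashMap;
import java.util.Map;

class SimpleLruCache<K, V> extends LinkedHashMap<K, V> {
    private final int maxEntries;

    SimpleLruCache(int maxEntries) {
        // Access order: iteration goes from least to most recently used.
        super(16, 0.75f, true);
        this.maxEntries = maxEntries;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        if (size() > maxEntries) {
            onItemEvicted(eldest.getKey(), eldest.getValue());
            return true;
        }
        return false;
    }

    // Hook invoked when an entry is dropped; subclasses can recycle the key,
    // as the constructors above do with key.release().
    protected void onItemEvicted(K key, V value) {
    }
}

class EvictionDemo {
    public static void main(String[] args) {
        SimpleLruCache<String, String> cache = new SimpleLruCache<String, String>(2) {
            @Override
            protected void onItemEvicted(String key, String value) {
                System.out.println("evicted " + key);
            }
        };
        cache.put("a", "1");
        cache.put("b", "2");
        cache.put("c", "3"); // prints "evicted a": "a" is the least recently used entry
    }
}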
@Test
public void testPreventEviction() {
    final MemoryCache cache = new LruResourceCache(100);
    final Resource<?> first = getResource(30);
    final Key firstKey = new MockKey();
    cache.put(firstKey, first);
    Resource<?> second = getResource(30);
    Key secondKey = new MockKey();
    cache.put(secondKey, second);
    Resource<?> third = getResource(30);
    Key thirdKey = new MockKey();
    cache.put(thirdKey, third);
    cache.setResourceRemovedListener(new ResourceRemovedListener() {
        @Override
        public void onResourceRemoved(Resource<?> removed) {
            if (removed == first) {
                cache.put(firstKey, first);
            }
        }
    });
    // Trimming halves the max size from 100 to 50; with three 30-unit resources, only one can remain.
    cache.trimMemory(ComponentCallbacks2.TRIM_MEMORY_UI_HIDDEN);
    // That remaining item must be first, because the listener puts it back whenever it is evicted.
    @SuppressWarnings("unchecked")
    LruCache<Key, Resource<?>> lruCache = (LruCache<Key, Resource<?>>) cache;
    assertTrue(lruCache.contains(firstKey));
    assertFalse(lruCache.contains(secondKey));
    assertFalse(lruCache.contains(thirdKey));
}
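// Hedged sketch, not the Glide implementation: demonstrates the behaviour the
// test above relies on. Entries are evicted least-recently-used first until
// the cache fits the trimmed size, and a removal listener that re-puts an
// entry makes it most recently used, so it survives the rest of the trim.
// Names and sizes mirror the test but the code is otherwise illustrative.
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.BiConsumer;

class TrimDemo {
    public static void main(String[] args) {
        // Access-ordered map: iteration starts at the least recently used entry.
        LinkedHashMap<String, Integer> cache = new LinkedHashMap<>(16, 0.75f, true);
        cache.put("first", 30);
        cache.put("second", 30);
        cache.put("third", 30);

        BiConsumer<String, Integer> removedListener = (key, size) -> {
            if (key.equals("first")) {
                cache.put(key, size); // put back; "first" is now most recently used
            }
        };

        int trimmedMaxSize = 50; // e.g. half of a 100-unit cache after TRIM_MEMORY_UI_HIDDEN
        while (totalSize(cache) > trimmedMaxSize) {
            Map.Entry<String, Integer> eldest = cache.entrySet().iterator().next();
            cache.remove(eldest.getKey());
            removedListener.accept(eldest.getKey(), eldest.getValue());
        }

        System.out.println(cache.keySet()); // [first]
    }

    private static int totalSize(Map<String, Integer> cache) {
        int total = 0;
        for (int size : cache.values()) {
            total += size;
        }
        return total;
    }
}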
public ModelCache(int size) {
    cache = new LruCache<ModelKey, A>(size);
}
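// Hedged usage sketch: a model cache like the constructor above is typically
// consulted with a get-or-compute pattern keyed by the model plus the
// requested dimensions. UrlCacheSketch, the key shape, and buildUrl are
// illustrative assumptions, not taken from the snippets; a plain HashMap
// stands in for the size-bounded LruCache.
import java.util.HashMap;
import java.util.Map;

class UrlCacheSketch {
    private final Map<String, String> cache = new HashMap<>();

    // Returns the cached value for this model and size, computing it on a miss.
    public String get(String model, int width, int height) {
        String key = model + "|" + width + "x" + height;
        String url = cache.get(key);
        if (url == null) {
            url = buildUrl(model, width, height);
            cache.put(key, url);
        }
        return url;
    }

    // Stand-in for whatever per-model work the cache exists to avoid repeating.
    private String buildUrl(String model, int width, int height) {
        return model + "?w=" + width + "&h=" + height;
    }
}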