/** Evicts every entry belonging to the given namespace, if eviction-on-close is configured. */
@Override
public void close(String namespace) {
  if (!config.isEvictOnClose()) {
    return;
  }
  cache.asMap().keySet().removeIf(entry -> entry.namespace.equals(namespace));
}
/** Returns the underlying bounded cache, or null if not applicable. */
private static @Nullable BoundedLocalCache<?, ?> unwrap(Cache<?, ?> cache) {
  ConcurrentMap<?, ?> map = cache.asMap();
  if (map instanceof LocalAsyncLoadingCache.AsMapView<?, ?>) {
    // An async cache exposes a view; reach through it to the backing map.
    map = ((LocalAsyncLoadingCache.AsMapView<?, ?>) cache.asMap()).delegate;
  }
  if (map instanceof BoundedLocalCache<?, ?>) {
    return (BoundedLocalCache<?, ?>) map;
  }
  return null;
}
/**
 * Forces the cache's internal frequency sketch to use a fixed random seed so that
 * admission decisions are deterministic across test runs.
 * <p>
 * The sketch and its seed are implementation details, so they are reached reflectively.
 *
 * @param cache the cache whose frequency sketch should be reseeded
 * @throws Exception if the reflective access to the cache internals fails
 */
public static void forceRandomSeed(CaffeineCache cache) throws Exception {
  // Use a bounded wildcard instead of a raw Map to avoid unchecked usage.
  final Map<?, ?> map = cache.getCache().asMap();
  final Method getFrequencySketch = map.getClass().getDeclaredMethod("frequencySketch");
  getFrequencySketch.setAccessible(true);
  final Object frequencySketch = getFrequencySketch.invoke(map);
  final Field seedField = frequencySketch.getClass().getDeclaredField("randomSeed");
  seedField.setAccessible(true);
  seedField.setInt(frequencySketch, RANDOM_SEED);
} }
/** Asserts that the cache holds exactly {@code expect}, and in that eviction order. */
private void checkContainsInOrder(Cache<Integer, Integer> cache, Integer... expect) {
  cache.cleanUp();

  List<Integer> expectedOrder = asList(expect);
  List<Integer> evictionList = ImmutableList.copyOf(
      cache.policy().eviction().get().coldest(Integer.MAX_VALUE).keySet());

  assertThat(cache.asMap().size(), is(equalTo(expect.length)));
  assertThat(cache.asMap().keySet(), containsInAnyOrder(expect));
  assertThat(evictionList, is(equalTo(expectedOrder)));
}
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.FULL,
    weigher = CacheWeigher.COLLECTION, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
public void replaceConditionally_sameWeight(
    Cache<String, List<Integer>> cache, CacheContext context, Eviction<?, ?> eviction) {
  // Seed entries with weights 3 ("a") and 1 ("b"), for a total weighted size of 4.
  cache.putAll(ImmutableMap.of("a", asList(1, 2, 3), "b", asList(1)));

  // A conditional replace with an equally sized value must leave the weighted size unchanged.
  boolean replaced = cache.asMap().replace("a", asList(1, 2, 3), asList(4, 5, 6));
  assertThat(replaced, is(true));
  assertThat(cache.estimatedSize(), is(2L));
  assertThat(eviction.weightedSize().getAsLong(), is(4L));
}
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.FULL,
    weigher = CacheWeigher.COLLECTION, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
public void remove(Cache<String, List<Integer>> cache, CacheContext context,
    Eviction<?, ?> eviction) {
  // Seed entries with weights 3 ("a") and 1 ("b").
  cache.putAll(ImmutableMap.of("a", asList(1, 2, 3), "b", asList(1)));

  // Removing "a" must drop its weight (3), leaving only "b" (weight 1).
  List<Integer> removed = cache.asMap().remove("a");
  assertThat(removed, is(asList(1, 2, 3)));
  assertThat(cache.estimatedSize(), is(1L));
  assertThat(eviction.weightedSize().getAsLong(), is(1L));
}
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.FULL,
    weigher = CacheWeigher.COLLECTION, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
public void removeConditionally_fails(
    Cache<String, List<Integer>> cache, CacheContext context, Eviction<?, ?> eviction) {
  // Seed entries with weights 3 ("a") and 1 ("b"), for a total weighted size of 4.
  cache.putAll(ImmutableMap.of("a", asList(1, 2, 3), "b", asList(1)));

  // A conditional remove with a non-matching value must be a no-op.
  boolean removed = cache.asMap().remove("a", asList(-1, -2, -3));
  assertThat(removed, is(false));
  assertThat(cache.estimatedSize(), is(2L));
  assertThat(eviction.weightedSize().getAsLong(), is(4L));
}
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.FULL,
    weigher = CacheWeigher.COLLECTION, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
public void replace_sameWeight(Cache<String, List<Integer>> cache, CacheContext context,
    Eviction<?, ?> eviction) {
  // Seed entries with weights 3 ("a") and 1 ("b"), for a total weighted size of 4.
  cache.putAll(ImmutableMap.of("a", asList(1, 2, 3), "b", asList(1)));

  // Replacing with a list of the same size must leave the weighted size unchanged.
  cache.asMap().replace("a", asList(-1, -2, -3));
  assertThat(cache.estimatedSize(), is(2L));
  assertThat(eviction.weightedSize().getAsLong(), is(4L));
}
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.FULL,
    weigher = CacheWeigher.COLLECTION, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
public void replace_changeWeight(Cache<String, List<Integer>> cache, CacheContext context,
    Eviction<?, ?> eviction) {
  // Seed entries with weights 3 ("a") and 1 ("b"), for a total weighted size of 4.
  cache.putAll(ImmutableMap.of("a", asList(1, 2, 3), "b", asList(1)));

  // Replacing "a" with a 4-element list grows its weight by one, to a total of 5.
  cache.asMap().replace("a", asList(-1, -2, -3, -4));
  assertThat(cache.estimatedSize(), is(2L));
  assertThat(eviction.weightedSize().getAsLong(), is(5L));
}
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine,
    maximumSize = Maximum.FULL, weigher = CacheWeigher.TEN)
public void weightedSize(Cache<Integer, Integer> cache, CacheContext context,
    Eviction<Integer, Integer> eviction) {
  // Sum each entry's reported weight and compare against the policy's aggregate.
  long weightedSize = cache.asMap().keySet().stream()
      .mapToLong(key -> eviction.weightOf(key).getAsInt())
      .sum();
  assertThat(weightedSize, is(eviction.weightedSize().getAsLong()));

  // With a constant weigher of 10, the aggregate is exactly 10x the entry count.
  assertThat(eviction.weightedSize().getAsLong(), is(10 * cache.estimatedSize()));
}
@Test(dataProvider = "caches")
@CacheSpec(values = {ReferenceType.WEAK, ReferenceType.SOFT}, population = Population.FULL)
public void identity_values(Cache<Integer, Integer> cache, CacheContext context) {
  // Deliberately box a fresh instance: equal to the cached value, but a distinct object.
  // Reference-based caches compare values by identity, so containsValue must miss.
  @SuppressWarnings("deprecation")
  Integer copy = new Integer(context.original().get(context.firstKey()));
  assertThat(cache.asMap().containsValue(copy), is(false));
}
@Test(dataProvider = "caches")
@CacheSpec(population = { Population.PARTIAL, Population.FULL })
public void invalidateAll_partial(Cache<Integer, Integer> cache, CacheContext context) {
  // Select the even keys as the subset to invalidate.
  List<Integer> evenKeys = cache.asMap().keySet().stream()
      .filter(key -> (key % 2) == 0)
      .collect(Collectors.toList());
  cache.invalidateAll(evenKeys);

  assertThat(cache.estimatedSize(), is(context.initialSize() - evenKeys.size()));
  assertThat(cache, hasRemovalNotifications(context, evenKeys.size(), RemovalCause.EXPLICIT));
  verifyWriter(context, (verifier, writer) -> {
    verifier.deletedAll(
        Maps.filterKeys(context.original(), Predicates.in(evenKeys)), RemovalCause.EXPLICIT);
  });
}
@CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
@CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
    population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
    compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING)
public void invalidateAll_full_writerFails(Cache<Integer, Integer> cache, CacheContext context) {
  // The exceptional writer must abort the bulk invalidation without losing any entry.
  try {
    cache.invalidateAll();
  } finally {
    assertThat(cache.asMap(), equalTo(context.original()));
  }
}
@Test(dataProvider = "caches")
@CacheSpec(keys = ReferenceType.STRONG,
    values = {ReferenceType.WEAK, ReferenceType.SOFT}, implementation = Implementation.Caffeine,
    expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED,
    maximumSize = Maximum.DISABLED, weigher = CacheWeigher.DEFAULT,
    population = Population.FULL, stats = Stats.ENABLED, compute = Compute.SYNC,
    removalListener = Listener.CONSUMING, writer = Writer.EXCEPTIONAL)
public void cleanUp_writerFails(Cache<Integer, Integer> cache, CacheContext context) {
  // Drop the strong references held by the test harness so the weak/soft values
  // become collectible, then force a full GC to clear them.
  context.clear();
  GcFinalization.awaitFullGc();
  // Maintenance attempts to discard the collected entries, but the exceptional
  // writer rejects every delete, so the entries must remain in the map.
  cache.cleanUp();
  context.disableRejectingCacheWriter();
  assertThat(cache.asMap().isEmpty(), is(false));
}
@CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = WriteException.class)
@CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
    compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING)
public void putAll_insert_writerFails(Cache<Integer, Integer> cache, CacheContext context) {
  // The exceptional writer must abort the bulk insert, leaving the map untouched.
  try {
    cache.putAll(context.absent());
  } finally {
    assertThat(cache.asMap(), equalTo(context.original()));
  }
}
@CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
@CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
    population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
    compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING)
public void invalidate_writerFails(Cache<Integer, Integer> cache, CacheContext context) {
  // The exceptional writer must abort the single-key invalidation without losing any entry.
  try {
    cache.invalidate(context.middleKey());
  } finally {
    assertThat(cache.asMap(), equalTo(context.original()));
  }
}
@CheckNoStats
@Test(dataProvider = "caches", expectedExceptions = DeleteException.class)
@CacheSpec(implementation = Implementation.Caffeine, keys = ReferenceType.STRONG,
    population = { Population.SINGLETON, Population.PARTIAL, Population.FULL },
    compute = Compute.SYNC, writer = Writer.EXCEPTIONAL, removalListener = Listener.REJECTING)
public void invalidateAll_partial_writerFails(
    Cache<Integer, Integer> cache, CacheContext context) {
  // The exceptional writer must abort the partial invalidation without losing any entry.
  try {
    cache.invalidateAll(context.firstMiddleLastKeys());
  } finally {
    assertThat(cache.asMap(), equalTo(context.original()));
  }
}
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, requiresWeakOrSoft = true,
    expireAfterAccess = Expire.DISABLED, expireAfterWrite = Expire.DISABLED,
    maximumSize = Maximum.UNREACHABLE, weigher = CacheWeigher.COLLECTION,
    population = Population.EMPTY, stats = Stats.ENABLED,
    removalListener = Listener.DEFAULT, writer = Writer.DISABLED)
public void compute_weighted(Cache<Integer, List<Integer>> cache, CacheContext context) {
  Integer key = context.absentKey();
  // Insert a single-element value (weight 1), then force a GC so the weak/soft
  // reference may be cleared before the compute runs.
  cache.put(key, ImmutableList.of(1));
  GcFinalization.awaitFullGc();
  // Recomputing the entry must account for the new value's weight (3),
  // regardless of whether the prior value was collected.
  cache.asMap().compute(key, (k, v) -> ImmutableList.of(1, 2, 3));
  assertThat(cache.policy().eviction().get().weightedSize().getAsLong(), is(3L));
}
/** Creates a fresh bounded cache and exposes it as a concurrent map. */
private static <K, V> ConcurrentMap<K, V> map() {
  Cache<K, V> cache = Caffeine.newBuilder()
      .maximumSize(Integer.MAX_VALUE)
      .build();
  return cache.asMap();
}
/** Registers map-contract test suites for the bounded sync and async cache variants. */
private static void addBoundedTests(TestSuite suite) throws Exception {
  // Synchronous bounded cache.
  suite.addTest(MapTestFactory.suite("BoundedCache", () -> {
    Cache<String, String> bounded = Caffeine.newBuilder()
        .maximumSize(Long.MAX_VALUE)
        .build();
    return bounded.asMap();
  }));
  // Asynchronous bounded cache, exercised through its synchronous view.
  suite.addTest(MapTestFactory.suite("BoundedAsyncCache", () -> {
    AsyncLoadingCache<String, String> bounded = Caffeine.newBuilder()
        .maximumSize(Long.MAX_VALUE)
        .buildAsync(key -> null);
    return bounded.synchronous().asMap();
  }));
}