// Verifies that an entry whose weight exceeds the maximum weighted capacity is
// rejected: it is evicted immediately with RemovalCause.SIZE and the resident
// entries are left untouched.
@Test(dataProvider = "caches")
@CacheSpec(population = Population.EMPTY, removalListener = Listener.CONSUMING,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG,
    maximumSize = Maximum.TEN, weigher = CacheWeigher.VALUE)
public void evict_weighted_entryTooBig(Cache<Integer, Integer> cache, CacheContext context) {
  // Fill the cache to its full weighted capacity (1 + 9 = 10, the maximum).
  cache.put(1, 1);
  cache.put(9, 9);
  assertThat(cache.estimatedSize(), is(2L));
  cache.policy().eviction().ifPresent(eviction -> {
    assertThat(eviction.weightedSize().getAsLong(), is(10L));
  });
  // An entry weighing 20 can never fit within the maximum weight of 10, so it
  // is discarded on arrival rather than displacing the resident entries.
  cache.put(20, 20);
  assertThat(cache.estimatedSize(), is(2L));
  cache.policy().eviction().ifPresent(eviction -> {
    assertThat(eviction.weightedSize().getAsLong(), is(10L));
  });
  // The rejected entry is reported to the listener as a size-based removal.
  assertThat(context.consumedNotifications(), is(equalTo(ImmutableList.of(
      new RemovalNotification<>(20, 20, RemovalCause.SIZE)))));
  if (context.isCaffeine()) {
    assertThat(context, hasEvictionWeight(20L));
  }
}
/** Setting the maximum size to Integer.MAX_VALUE must be reflected by both the builder field and the built cache's eviction policy. */
@Test
public void maximumSize_large() {
  final long expected = Integer.MAX_VALUE;
  Caffeine<?, ?> builder = Caffeine.newBuilder().maximumSize(Integer.MAX_VALUE);
  assertThat(builder.maximumSize, is(expected));
  Cache<?, ?> cache = builder.build();
  assertThat(cache.policy().eviction().get().getMaximum(), is(expected));
}
/** The eviction policy must report itself as weighted exactly when the test configuration enabled weighing. */
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine,
    maximumSize = Maximum.FULL, population = Population.EMPTY)
public void isWeighted(CacheContext context, Eviction<Integer, Integer> eviction) {
  boolean expected = context.isWeighted();
  assertThat(eviction.isWeighted(), is(expected));
}
// Register a live gauge instead of capturing the weighted size once: the
// original captured the long snapshot in the lambda, so the metric reported
// the registration-time value forever. Re-reading the policy on every gauge
// read reports the current weighted size.
if (eviction.weightedSize().isPresent()) {
  cacheMetrics.put(cacheMetricName("weighted", "size"),
      (Gauge<Long>) () -> eviction.weightedSize().orElse(0L));
}
/**
 * Translates Caffeine statistics into Druid's {@code CacheStats}.
 * The weighted size is reported only when the cache tracks weights; -1 otherwise.
 */
@Override
public org.apache.druid.client.cache.CacheStats getStats() {
  final CacheStats stats = cache.stats();
  final long size = cache.policy().eviction()
      .filter(eviction -> eviction.isWeighted())
      .map(eviction -> eviction.weightedSize())
      .orElse(OptionalLong.empty())
      .orElse(-1);
  return new org.apache.druid.client.cache.CacheStats(
      stats.hitCount(),
      stats.missCount(),
      cache.estimatedSize(),
      size,
      stats.evictionCount(),
      0,
      stats.loadFailureCount());
}
/**
 * Translates Caffeine statistics into Druid's {@code CacheStats}.
 * A size of -1 signals "unknown" when the cache does not track weights.
 */
@Override
public io.druid.client.cache.CacheStats getStats() {
  final CacheStats stats = cache.stats();
  final OptionalLong weighted = cache.policy().eviction()
      .map(eviction -> eviction.isWeighted() ? eviction.weightedSize() : OptionalLong.empty())
      .orElse(OptionalLong.empty());
  final long size = weighted.isPresent() ? weighted.getAsLong() : -1;
  return new io.druid.client.cache.CacheStats(
      stats.hitCount(),
      stats.missCount(),
      cache.estimatedSize(),
      size,
      stats.evictionCount(),
      0,
      stats.loadFailureCount());
}
/** Returns the policy's weighted size, falling back to the entry count when no eviction policy or weigher is configured. */
@Override
public long weightedSize() {
  return cache.policy().eviction()
      .map(e -> e.weightedSize().orElseGet(() -> cache.estimatedSize()))
      .orElseGet(() -> cache.estimatedSize());
}
}
/** Reports the weighted size when tracked; the estimated entry count otherwise. */
@Override
public long weightedSize() {
  return cache.policy().eviction()
      .map(e -> e.weightedSize().orElseGet(() -> cache.estimatedSize()))
      .orElseGet(() -> cache.estimatedSize());
}
}
// Adjusts the eviction policy's maximum to match the configured size.
private void resizeIfNecessary() {
  // Read the target once: the original called size.get() twice, so a
  // concurrent change between the compare and the set could apply a value
  // that was never compared against the current maximum.
  final long maximum = size.get();
  if (evictionPolicy.getMaximum() != maximum) {
    evictionPolicy.setMaximum(maximum);
  }
}
/** Weighted size when the cache weighs entries; otherwise the estimated entry count. */
@Override
public long weightedSize() {
  return cache.policy().eviction()
      .map(p -> p.weightedSize().orElseGet(() -> cache.estimatedSize()))
      .orElseGet(() -> cache.estimatedSize());
}
}
/** Delegates to the eviction policy's weighted size, defaulting to the entry count. */
@Override
public long weightedSize() {
  return cache.policy().eviction()
      .map(p -> p.weightedSize().orElseGet(() -> cache.estimatedSize()))
      .orElseGet(() -> cache.estimatedSize());
}
}
/** Exposes the eviction policy's configured maximum as this cache's capacity. */
@Override
public long getMaxSize() {
  final long maximum = policy.getMaximum();
  return maximum;
}
// Loads a block of data, capping the requested size at Integer.MAX_VALUE
// because the loader API takes an int while the policy maximum is a long.
// Returns null when the loader produced no data.
private Block load(Loader loader, Map<String,byte[]> resolvedDeps) {
  final int capped = (int) Math.min(Integer.MAX_VALUE, policy.getMaximum());
  final byte[] data = loader.load(capped, resolvedDeps);
  return (data == null) ? null : new Block(data);
}
// Synchronizes the eviction policy's maximum with the configured size.
private void resizeIfNecessary() {
  // Snapshot the size once so the comparison and the update use the same
  // value; the original read size.get() twice and could race with a
  // concurrent resize between the two reads.
  final long maximum = size.get();
  if (evictionPolicy.getMaximum() != maximum) {
    evictionPolicy.setMaximum(maximum);
  }
}
// Logs cache occupancy in megabytes plus the raw hit/miss statistics.
private void logStats() {
  final double bytesPerMb = 1024 * 1024;
  double maxMB = ((double) policy.getMaximum()) / bytesPerMb;
  // Guard against a non-weighted policy: the original's getAsLong() throws
  // NoSuchElementException when weightedSize() is empty. Fall back to the
  // entry count instead of failing a debug-log call.
  double sizeMB = ((double) policy.weightedSize().orElseGet(cache::estimatedSize)) / bytesPerMb;
  double freeMB = maxMB - sizeMB;
  log.debug("Cache Size={}MB, Free={}MB, Max={}MB, Blocks={}",
      sizeMB, freeMB, maxMB, cache.estimatedSize());
  log.debug(cache.stats().toString());
}
// NOTE(review): this is a truncated fragment of a larger test — the two
// verifyWriter lambdas opened below are never closed in this excerpt, and as
// written they would nest and re-declare (verifier, ignored), which is not
// legal Java. Confirm the closing braces against the full file.
cache.put(3, value3);
await().until(cache::estimatedSize, is(4L));
assertThat(eviction.weightedSize().getAsLong(), is(10L));
await().until(cache::estimatedSize, is(4L));
// Key 1 was evicted to admit the new entry; the total weight drops 10 -> 8.
assertThat(cache.asMap().containsKey(1), is(false));
assertThat(eviction.weightedSize().getAsLong(), is(8L));
verifyWriter(context, (verifier, ignored) -> {
  verify(writer).delete(1, value1, RemovalCause.SIZE);
  assertThat(eviction.weightedSize().getAsLong(), is(8L));
  verifyWriter(context, (verifier, ignored) -> {
    verify(writer).delete(5, value5, RemovalCause.SIZE);
// Exercises weighted eviction with an in-flight async computation: the pending
// future contributes no weight until it completes, after which its value's
// weight (6) pushes the total over the maximum of ten and forces a
// size-based eviction.
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.TEN,
    weigher = CacheWeigher.VALUE, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG,
    removalListener = Listener.CONSUMING)
@SuppressWarnings("FutureReturnValueIgnored")
public void evict_weighted_async(AsyncLoadingCache<Integer, Integer> cache,
    CacheContext context, Eviction<?, ?> eviction) {
  AtomicBoolean ready = new AtomicBoolean();
  AtomicBoolean done = new AtomicBoolean();
  // The value (and hence its weight, 6) is withheld until `ready` is set.
  CompletableFuture<Integer> valueFuture = CompletableFuture.supplyAsync(() -> {
    Awaits.await().untilTrue(ready);
    return 6;
  });
  valueFuture.whenComplete((r, e) -> done.set(true));
  cache.put(5, CompletableFuture.completedFuture(5));
  cache.put(4, CompletableFuture.completedFuture(4));
  cache.put(6, valueFuture);
  // The incomplete future weighs nothing yet: 5 + 4 = 9 of a maximum 10.
  assertThat(eviction.weightedSize().getAsLong(), is(9L));
  assertThat(cache.synchronous().estimatedSize(), is(3L));
  ready.set(true);
  Awaits.await().untilTrue(done);
  // Completion raises the total weight past the maximum, so one entry of
  // weight 5 is evicted, leaving 4 + 6 = 10.
  Awaits.await().until(context::consumedNotifications, hasSize(1));
  Awaits.await().until(() -> cache.synchronous().estimatedSize(), is(2L));
  Awaits.await().until(() -> eviction.weightedSize().getAsLong(), is(10L));
  assertThat(context, hasEvictionWeight(5L));
  assertThat(context, hasRemovalNotifications(context, 1, RemovalCause.SIZE));
  verifyWriter(context, (verifier, writer) -> verifier.deletions(1, RemovalCause.SIZE));
}
// A zero-capacity weighted cache must evict an async entry as soon as its
// value materializes, even though the weightless pending future was admitted.
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.ZERO,
    weigher = CacheWeigher.COLLECTION, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
@SuppressWarnings("FutureReturnValueIgnored")
public void evict_zero_async(AsyncLoadingCache<Integer, List<Integer>> cache,
    CacheContext context, Eviction<?, ?> eviction) {
  AtomicBoolean ready = new AtomicBoolean();
  AtomicBoolean done = new AtomicBoolean();
  // The collection value is withheld until `ready` is set.
  CompletableFuture<List<Integer>> valueFuture = CompletableFuture.supplyAsync(() -> {
    Awaits.await().untilTrue(ready);
    return ImmutableList.of(1, 2, 3, 4, 5);
  });
  valueFuture.whenComplete((r, e) -> done.set(true));
  cache.put(context.absentKey(), valueFuture);
  // While pending, the future weighs nothing, so it fits even at maximum zero.
  assertThat(eviction.weightedSize().getAsLong(), is(0L));
  assertThat(cache.synchronous().estimatedSize(), is(1L));
  ready.set(true);
  Awaits.await().untilTrue(done);
  // Once the value arrives, any positive weight exceeds the zero maximum and
  // the entry is evicted, returning the cache to empty.
  Awaits.await().until(() -> eviction.weightedSize().getAsLong(), is(0L));
  Awaits.await().until(() -> cache.synchronous().estimatedSize(), is(0L));
  assertThat(context, hasRemovalNotifications(context, 1, RemovalCause.SIZE));
  verifyWriter(context, (verifier, writer) -> verifier.deletions(1, RemovalCause.SIZE));
}
// Overfilling a fully-populated cache must evict down to the configured
// maximum — measured by weight for weighted configurations, by count otherwise.
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, population = Population.FULL,
    maximumSize = { Maximum.ZERO, Maximum.ONE, Maximum.FULL },
    weigher = {CacheWeigher.DEFAULT, CacheWeigher.TEN})
public void evict(Cache<Integer, Integer> cache, CacheContext context,
    Eviction<Integer, Integer> eviction) {
  cache.putAll(context.absent());
  if (eviction.isWeighted()) {
    assertThat(eviction.weightedSize().getAsLong(), is(context.maximumWeight()));
  } else {
    assertThat(cache.estimatedSize(), is(context.maximumSize()));
  }
  // The cache started full, so each absent key admitted forces one eviction.
  int count = context.absentKeys().size();
  assertThat(context, hasEvictionCount(count));
  // NOTE(review): the async tests in this file pass `context` as the first
  // argument to hasRemovalNotifications — confirm this overload accepts `cache`.
  assertThat(cache, hasRemovalNotifications(context, count, RemovalCause.SIZE));
  verifyWriter(context, (verifier, writer) -> {
    // Every entry present before putAll but absent afterwards must have been
    // deleted through the writer with a size-based cause.
    Map<Integer, Integer> all = new HashMap<>(context.original());
    all.putAll(context.absent());
    MapDifference<Integer, Integer> diff = Maps.difference(all, cache.asMap());
    verifier.deletedAll(diff.entriesOnlyOnLeft(), RemovalCause.SIZE);
  });
}
// The weight of an asynchronously computed value is recorded only after the
// future completes: zero while pending, then the collection's size (5).
@Test(dataProvider = "caches")
@CacheSpec(implementation = Implementation.Caffeine, maximumSize = Maximum.FULL,
    weigher = CacheWeigher.COLLECTION, population = Population.EMPTY,
    keys = ReferenceType.STRONG, values = ReferenceType.STRONG)
@SuppressWarnings("FutureReturnValueIgnored")
public void put_asyncWeight(AsyncLoadingCache<Integer, List<Integer>> cache,
    CacheContext context, Eviction<?, ?> eviction) {
  AtomicBoolean ready = new AtomicBoolean();
  AtomicBoolean done = new AtomicBoolean();
  // The collection value is withheld until `ready` is set.
  CompletableFuture<List<Integer>> valueFuture = CompletableFuture.supplyAsync(() -> {
    Awaits.await().untilTrue(ready);
    return ImmutableList.of(1, 2, 3, 4, 5);
  });
  valueFuture.whenComplete((r, e) -> done.set(true));
  cache.put(context.absentKey(), valueFuture);
  // A pending future contributes zero weight but still counts as an entry.
  assertThat(eviction.weightedSize().getAsLong(), is(0L));
  assertThat(cache.synchronous().estimatedSize(), is(1L));
  ready.set(true);
  Awaits.await().untilTrue(done);
  // After completion the entry's weight becomes the collection size.
  Awaits.await().until(() -> eviction.weightedSize().getAsLong(), is(5L));
  Awaits.await().until(() -> cache.synchronous().estimatedSize(), is(1L));
}