Refine search
/**
 * Creates the lock-cleaner service.
 *
 * @param store      store holding the lock columns to be cleaned
 * @param serializer serializer for consistent-key lock entries
 * @param exec       executor on which cleanup tasks run
 * @param cooldown   how long a cleaned key-column stays in the blocked map
 * @param times      timestamp provider used by cleanup tasks
 */
public StandardLockCleanerService(KeyColumnValueStore store, ConsistentKeyLockerSerializer serializer, ExecutorService exec, Duration cooldown, TimestampProvider times) {
    this.store = store;
    this.serializer = serializer;
    this.exec = exec;
    this.times = times;
    // Entries auto-expire once they have been in the map for the cooldown
    // duration (expireAfterWrite), exposed as a plain ConcurrentMap view.
    blocked = CacheBuilder.newBuilder()
            .concurrencyLevel(COOLDOWN_CONCURRENCY_LEVEL)
            .expireAfterWrite(cooldown.toNanos(), TimeUnit.NANOSECONDS)
            .<KeyColumn, Instant>build()
            .asMap();
}
public void testConcurrencyLevel_large() { CacheBuilder.newBuilder().concurrencyLevel(Integer.MAX_VALUE); // don't actually build this beast }
public StandardLockCleanerService(KeyColumnValueStore store, ConsistentKeyLockerSerializer serializer, ExecutorService exec, Duration cooldown, TimestampProvider times) { this.store = store; this.serializer = serializer; this.exec = exec; this.times = times; blocked = CacheBuilder.newBuilder() .expireAfterWrite(cooldown.toNanos(), TimeUnit.NANOSECONDS) .concurrencyLevel(COOLDOWN_CONCURRENCY_LEVEL) .<KeyColumn, Instant>build() .asMap(); }
private static Cache<String, SSLContext> initDefaultCertificateCache() { return CacheBuilder.newBuilder() // .expireAfterAccess(5, TimeUnit.MINUTES) // .concurrencyLevel(16) // .build(); }
/**
 * Creates a tile cache over the given graph.
 *
 * The cache is bounded by the {@code size} field and sharded by the
 * {@code concurrency} field (both declared elsewhere in this class).
 * NOTE(review): build(this) passes this object as the cache's loader, which
 * implies TileCache implements/extends Guava's CacheLoader — confirm against
 * the class declaration, which is outside this view.
 */
public TileCache(Graph graph) { this.graph = graph; this.tileCache = CacheBuilder.newBuilder() .concurrencyLevel(concurrency) .maximumSize(size) .build(this); }
/**
 * Initializes the four correlation caches used for request/response tracking.
 *
 * All four share one policy — at most 10,000 entries, eviction one minute
 * after last access, concurrency level 2 — so the configuration lives in a
 * single private helper instead of being repeated four times.
 */
private void initCaches() {
    requestIdWebSocketCache = newExpiringCache();
    requestIdRestSocketCache = newExpiringCache();
    fileTokenTempFileCache = newExpiringCache();
    requestIdRestFormatCache = newExpiringCache();
}

/** Builds one cache with the shared size/expiry/concurrency policy. */
private static <K, V> Cache<K, V> newExpiringCache() {
    return CacheBuilder.newBuilder()
            .maximumSize(10000)
            .concurrencyLevel(2)
            .expireAfterAccess(1, TimeUnit.MINUTES)
            .build();
}
/**
 * Creates a bounded top-N tracker.
 *
 * @param size maximum number of entries this set is intended to track
 */
public TopNSet(int size) {
    maxsize = size;
    // Backed by the ConcurrentMap view of an unbounded Guava cache with a
    // high concurrency level for write-heavy use.
    set = CacheBuilder.newBuilder()
            .concurrencyLevel(64)
            .<String, Long>build()
            .asMap();
}
public void testConcurrencyLevel_setTwice() { CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().concurrencyLevel(16); try { // even to the same value is not allowed builder.concurrencyLevel(16); fail(); } catch (IllegalStateException expected) { } }
/**
 * Builds a cache with the class-wide default policy: weak values, the default
 * concurrency level, access-based expiry, and a maximum entry count.
 *
 * NOTE(review): cacheName is ignored — every call constructs a fresh cache
 * with the same configuration; confirm whether per-name cache reuse was
 * intended by this interface.
 *
 * @param cacheName logical cache name (currently unused)
 * @return a newly built cache instance
 */
@Override public <K extends Serializable, V extends Serializable> Cache<K, V> getCache(String cacheName) {
    // Return the builder result directly; the intermediate local added nothing.
    return CacheBuilder.newBuilder()
            .weakValues()
            .concurrencyLevel(DEFAULT_CONCURRENCY_LEVEL)
            .expireAfterAccess(DEFAULT_EXPIRATION_MINUTES, TimeUnit.MINUTES)
            .maximumSize(DEFAULT_MAX_ENTRIES)
            .build();
}
/**
 * Creates a two-level schema cache: fast concurrent maps in front, bounded
 * Guava caches as backup, sized from the requested type capacity.
 *
 * NOTE(review): in the collapsed one-line original, the mid-line
 * "// typeRelations = ..." comment swallowed the live schemaRelations
 * assignment and the trailing TODO; restored as multi-line code so the
 * commented-out alternative and the TODO sit on their own lines.
 *
 * @param size      maximum number of cached types; must be positive
 * @param retriever callback used to load schema entries on cache miss
 */
public StandardSchemaCache(final int size, final StoreRetrieval retriever) {
    Preconditions.checkArgument(size>0,"Size must be positive");
    Preconditions.checkNotNull(retriever);
    maxCachedTypes = size;
    // Relation capacity scales with type capacity by a fixed multiplier.
    maxCachedRelations = maxCachedTypes *CACHE_RELATION_MULTIPLIER;
    this.retriever=retriever;
    typeNamesBackup = CacheBuilder.newBuilder()
            .concurrencyLevel(CONCURRENCY_LEVEL).initialCapacity(INITIAL_CACHE_SIZE)
            .maximumSize(maxCachedTypes).build();
    typeNames = new ConcurrentHashMap<String, Long>(INITIAL_CAPACITY,0.75f,CONCURRENCY_LEVEL);
    schemaRelationsBackup = CacheBuilder.newBuilder()
            .concurrencyLevel(CONCURRENCY_LEVEL).initialCapacity(INITIAL_CACHE_SIZE *CACHE_RELATION_MULTIPLIER)
            .maximumSize(maxCachedRelations).build();
    // typeRelations = new ConcurrentHashMap<Long, EntryList>(INITIAL_CAPACITY*CACHE_RELATION_MULTIPLIER,0.75f,CONCURRENCY_LEVEL);
    schemaRelations = new NonBlockingHashMapLong<EntryList>(INITIAL_CAPACITY*CACHE_RELATION_MULTIPLIER);
    //TODO: Is this data structure safe or should we go with ConcurrentHashMap (line above)?
}
public GuavaCacheWrapper(MetricRegistry registry) { cache = CacheBuilder.newBuilder() .expireAfterWrite(1, TimeUnit.SECONDS) .maximumSize(1000) .concurrencyLevel(8) .recordStats() .build(); MetricUtils.safelyRegisterAll(registry, new CacheStatsSet(MetricRegistry.name(MongoDbAuthorizationCacheManager.class, "cache"), cache)); }
/**
 * Creates a cache of relation slice queries plus a precomputed query per
 * relation category.
 *
 * @param edgeSerializer serializer used to construct category queries
 * @param capacity       expected working-set size; the cache may hold up to
 *                       1.5x this many entries before evicting
 */
public RelationQueryCache(EdgeSerializer edgeSerializer, int capacity) {
    this.edgeSerializer = edgeSerializer;
    // capacity*3/2 leaves headroom above the expected working set; note this
    // int expression would overflow for very large capacities.
    this.cache = CacheBuilder.newBuilder().maximumSize(capacity*3/2).initialCapacity(capacity)
            .concurrencyLevel(2).build();
    // Diamond operator, consistent with the other copy of this constructor in
    // this file.
    relationTypes = new EnumMap<>(RelationCategory.class);
    for (RelationCategory rt : RelationCategory.values()) {
        relationTypes.put(rt,edgeSerializer.getQuery(rt,false));
    }
}
public void testConcurrencyLevel_zero() {
    CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
    try {
        // Zero is below the minimum legal concurrency level and must be rejected.
        cacheBuilder.concurrencyLevel(0);
        fail();
    } catch (IllegalArgumentException expected) {
    }
}
/**
 * Creates a cache of relation slice queries sized from the given capacity and
 * precomputes one query per relation category.
 *
 * @param edgeSerializer serializer used to build per-category queries
 * @param capacity       expected working-set size for the query cache
 */
public RelationQueryCache(EdgeSerializer edgeSerializer, int capacity) {
    this.edgeSerializer = edgeSerializer;
    this.cache = CacheBuilder.newBuilder()
            .maximumSize(capacity * 3 / 2)
            .initialCapacity(capacity)
            .concurrencyLevel(2)
            .build();
    relationTypes = new EnumMap<>(RelationCategory.class);
    for (RelationCategory category : RelationCategory.values()) {
        relationTypes.put(category, edgeSerializer.getQuery(category, false));
    }
}
public void testUpdateRecency_onInvalidate() {
    final LoadingCache<Integer, Integer> cache = CacheBuilder.newBuilder()
            .maximumSize(MAX_SIZE)
            .concurrencyLevel(1)
            .build(identityLoader());
    // Invalidating an entry while recency is being checked must keep the
    // cache's internal recency bookkeeping consistent.
    Receiver<ReferenceEntry<Integer, Integer>> invalidator =
            new Receiver<ReferenceEntry<Integer, Integer>>() {
                @Override
                public void accept(ReferenceEntry<Integer, Integer> entry) {
                    cache.invalidate(entry.getKey());
                }
            };
    CacheTesting.checkRecency(cache, MAX_SIZE, invalidator);
}
CacheBuilder<K, V> recreateCacheBuilder() { CacheBuilder<K, V> builder = CacheBuilder.newBuilder() .setKeyStrength(keyStrength) .setValueStrength(valueStrength) .keyEquivalence(keyEquivalence) .valueEquivalence(valueEquivalence) .concurrencyLevel(concurrencyLevel) .removalListener(removalListener); builder.strictParsing = false;
CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder(); if (initialCapacity != null) { builder.initialCapacity(initialCapacity); builder.concurrencyLevel(concurrencyLevel);
@GwtIncompatible // CacheTesting public void testConcurrencyLevel_small() { LoadingCache<?, ?> cache = CacheBuilder.newBuilder().concurrencyLevel(1).build(identityLoader()); LocalCache<?, ?> map = CacheTesting.toLocalCache(cache); assertThat(map.segments).hasLength(1); }
public void testEviction_maxSizeOneSegment() {
    LoadingCache<Integer, Integer> cache = CacheBuilder.newBuilder()
            .concurrencyLevel(1)
            .maximumSize(MAX_SIZE)
            .build(identityLoader());
    for (int key = 0; key < 2 * MAX_SIZE; key++) {
        cache.getUnchecked(key);
        // Size grows with each new key until the cap, then stays pinned there.
        assertEquals(Math.min(key + 1, MAX_SIZE), cache.size());
    }
    assertEquals(MAX_SIZE, cache.size());
    CacheTesting.checkValidState(cache);
}
public void testCacheBuilderFrom_string() {
    // A builder parsed from a spec string must be equivalent to the same
    // configuration assembled through the fluent API.
    String spec = "initialCapacity=10,maximumSize=20,concurrencyLevel=30,"
            + "weakKeys,weakValues,expireAfterAccess=10m";
    CacheBuilder<?, ?> parsed = CacheBuilder.from(spec);
    CacheBuilder<?, ?> manual = CacheBuilder.newBuilder()
            .initialCapacity(10)
            .maximumSize(20)
            .concurrencyLevel(30)
            .weakKeys()
            .weakValues()
            .expireAfterAccess(10L, TimeUnit.MINUTES);
    assertCacheBuilderEquivalence(manual, parsed);
}