@NotNull @Override public Set<String> apply(@NotNull NormalizedCache cache) { return cache.merge(recordSet, cacheHeaders); } }).or(Collections.<String>emptySet());
@NotNull @Override public Set<String> apply(@NotNull NormalizedCache cache) { return cache.merge(record, cacheHeaders); } }).or(Collections.<String>emptySet());
/**
 * Clearing the primary cache of a two-level chain must remove the merged
 * record so a subsequent lookup through the chain yields {@code null}.
 */
@Test public void testClearAll() {
  LruNormalizedCacheFactory backingFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION);
  NormalizedCache cacheChain = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION)
      .chain(backingFactory)
      .createChain(basicFieldAdapter);

  // Seed the chain with a single empty record, then wipe everything.
  cacheChain.merge(Record.builder("key").build(), CacheHeaders.NONE);
  cacheChain.clearAll();

  assertThat(cacheChain.loadRecord("key", CacheHeaders.NONE)).isNull();
}
@Test public void testDualCacheMultipleRecord() { LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION); NormalizedCache primaryCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION) .chain(secondaryCacheFactory).createChain(basicFieldAdapter); Record.Builder recordBuilder = Record.builder("root1"); recordBuilder.addField("bar", "bar"); final Record record1 = recordBuilder.build(); recordBuilder = Record.builder("root2"); recordBuilder.addField("bar", "bar"); final Record record2 = recordBuilder.build(); recordBuilder = Record.builder("root3"); recordBuilder.addField("bar", "bar"); final Record record3 = recordBuilder.build(); Collection<Record> records = Arrays.asList(record1, record2, record3); Collection<String> keys = Arrays.asList(record1.key(), record2.key(), record3.key()); primaryCache.merge(records, CacheHeaders.NONE); assertThat(primaryCache.loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); //verify write through behavior assertThat(primaryCache.loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); assertThat(primaryCache.nextCache().get() .loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); }
/**
 * Clearing only the secondary (next) cache in a two-level chain must make
 * direct lookups against that secondary cache return {@code null}.
 */
@Test public void testClearSecondaryCache() {
  LruNormalizedCacheFactory backingFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION);
  NormalizedCache chainedCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION)
      .chain(backingFactory)
      .createChain(basicFieldAdapter);

  // Merge through the chain, then wipe only the secondary layer.
  chainedCache.merge(Record.builder("key").build(), CacheHeaders.NONE);
  chainedCache.nextCache().get().clearAll();

  assertThat(chainedCache.nextCache().get().loadRecord("key", CacheHeaders.NONE)).isNull();
}
@Test public void testDualCacheSingleRecord() { LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION); NormalizedCache primaryCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION) .chain(secondaryCacheFactory).createChain(basicFieldAdapter); Record.Builder recordBuilder = Record.builder("root"); recordBuilder.addField("bar", "bar"); final Record record = recordBuilder.build(); primaryCache.merge(record, CacheHeaders.NONE); //verify write through behavior assertThat(primaryCache.loadRecord("root", CacheHeaders.NONE).field("bar")).isEqualTo("bar"); assertThat(primaryCache.nextCache().get().loadRecord("root", CacheHeaders.NONE).field("bar")).isEqualTo("bar"); }
@Override public void apply(@Nonnull NormalizedCache cache) { for (Record record : recordSet) { cache.merge(record, cacheHeaders); } } });
@Override public void apply(@Nonnull NormalizedCache cache) { cache.merge(apolloRecord, cacheHeaders); } });
@Override public void apply(@Nonnull NormalizedCache cache) { cache.merge(apolloRecord, cacheHeaders); } });
@Override public void apply(@Nonnull NormalizedCache cache) { cache.merge(apolloRecord, cacheHeaders); } });
@Nonnull @Override public Set<String> apply(@Nonnull NormalizedCache cache) { return cache.merge(record, cacheHeaders); } }).or(Collections.<String>emptySet());
@Nonnull @Override public Set<String> apply(@Nonnull NormalizedCache cache) { return cache.merge(record, cacheHeaders); } }).or(Collections.<String>emptySet());
/**
 * Merges a collection of records by delegating each one to
 * {@link NormalizedCache#merge(Record, CacheHeaders)}. Implementations whose
 * underlying storage supports batched writes should override this method with
 * an optimized version.
 *
 * @param recordSet The set of Records to merge.
 * @param cacheHeaders The {@link CacheHeaders} associated with the request which generated this record.
 * @return A set of record field keys that have changed. This set is returned by {@link Record#mergeWith(Record)}.
 */
@Nonnull public Set<String> merge(@Nonnull Collection<Record> recordSet, @Nonnull CacheHeaders cacheHeaders) {
  // LinkedHashSet keeps the changed keys in first-seen order while
  // de-duplicating keys reported by multiple records.
  Set<String> changedKeys = new LinkedHashSet<>();
  for (Record recordToMerge : recordSet) {
    Set<String> keysForRecord = merge(recordToMerge, cacheHeaders);
    changedKeys.addAll(keysForRecord);
  }
  return changedKeys;
}
/**
 * Batch merge: forwards every record in {@code recordSet} to the single-record
 * {@link NormalizedCache#merge(Record, CacheHeaders)} and aggregates the
 * results. Override when the backing store can persist multiple records more
 * efficiently in one operation.
 *
 * @param recordSet The set of Records to merge.
 * @param cacheHeaders The {@link CacheHeaders} associated with the request which generated this record.
 * @return A set of record field keys that have changed. This set is returned by {@link Record#mergeWith(Record)}.
 */
@Nonnull public Set<String> merge(@Nonnull Collection<Record> recordSet, @Nonnull CacheHeaders cacheHeaders) {
  // Union of per-record changed keys; LinkedHashSet preserves insertion order.
  Set<String> allChangedKeys = new LinkedHashSet<>();
  for (Record currentRecord : recordSet) {
    allChangedKeys.addAll(merge(currentRecord, cacheHeaders));
  }
  return allChangedKeys;
}
/**
 * Merges the given records into this database-backed cache and propagates the
 * write through to the next cache in the chain (when one is present).
 *
 * @param recordSet the records to merge
 * @param cacheHeaders headers for this request; {@code DO_NOT_STORE} skips persistence entirely
 * @return the set of changed record field keys reported by the superclass batch merge,
 *         or an empty set when {@code DO_NOT_STORE} is set
 */
@Nonnull @Override public Set<String> merge(@Nonnull final Collection<Record> recordSet, @Nonnull final CacheHeaders cacheHeaders) {
  // Honor the DO_NOT_STORE cache header: nothing is written anywhere.
  if (cacheHeaders.hasHeader(DO_NOT_STORE)) {
    return Collections.emptySet();
  }
  // Write-through to the next cache first, one record at a time.
  // NOTE(review): this forwarding happens OUTSIDE the local transaction below,
  // so a failure during the local merge can leave the layers inconsistent —
  // confirm this ordering is intentional.
  //noinspection ResultOfMethodCallIgnored
  Optional<NormalizedCache> normalizedCacheOptional = nextCache().apply(new Action<NormalizedCache>() {
    @Override public void apply(@Nonnull NormalizedCache cache) {
      for (Record record : recordSet) {
        cache.merge(record, cacheHeaders);
      }
    }
  });
  // Perform the local batch merge inside a single transaction so the whole
  // record set is committed (or rolled back) atomically.
  Set<String> changedKeys;
  try {
    database.beginTransaction();
    changedKeys = super.merge(recordSet, cacheHeaders);
    database.setTransactionSuccessful();
  } finally {
    // endTransaction() rolls back unless setTransactionSuccessful() was reached.
    database.endTransaction();
  }
  return changedKeys;
}