// Tail of an anonymous-class expression whose opening is not visible in this chunk.
// Looks up {@code key} in the given cache and wraps the nullable loadRecord result;
// NOTE(review): Optional.fromNullable suggests a Guava-style Optional, not java.util — confirm import.
@NotNull @Override public Optional<Record> apply(@NotNull NormalizedCache cache) { return Optional.fromNullable(cache.loadRecord(key, cacheHeaders)); } });
/**
 * Reads a batch of records by delegating each key to
 * {@link NormalizedCache#loadRecord(String, CacheHeaders)}. Implementations should override this
 * method if the underlying storage technology can offer an optimized manner to read multiple records.
 *
 * @param keys The set of {@link Record} keys to read.
 * @param cacheHeaders The cache headers associated with the request which generated this record.
 */
@NotNull public Collection<Record> loadRecords(@NotNull Collection<String> keys, @NotNull CacheHeaders cacheHeaders) {
  final List<Record> found = new ArrayList<>(keys.size());
  for (String recordKey : keys) {
    final Record cached = loadRecord(recordKey, cacheHeaders);
    // Keys with no cached entry are simply omitted from the result.
    if (cached != null) {
      found.add(cached);
    }
  }
  return found;
}
@Test public void testDualCache_recordNotPresent() {
  // Build a two-level chain: primary LRU backed by a secondary LRU, neither evicting.
  LruNormalizedCacheFactory backingFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION);
  NormalizedCache cacheChain = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION)
      .chain(backingFactory)
      .createChain(basicFieldAdapter);

  // A key that was never merged must miss at every level of the chain.
  assertThat(cacheChain.loadRecord("not_present_id", CacheHeaders.NONE)).isNull();
}
/** Asserts that {@code store} contains {@code testRecord}'s key with matching "a" and "b" fields. */
private void assertTestRecordPresentAndAccurate(Record testRecord, NormalizedCache store) {
  final Record loaded = store.loadRecord(testRecord.key(), CacheHeaders.NONE);
  assertThat(loaded.key()).isEqualTo(testRecord.key());
  assertThat(loaded.field("a")).isEqualTo(testRecord.field("a"));
  assertThat(loaded.field("b")).isEqualTo(testRecord.field("b"));
}
@Test public void testClearPrimaryCache() {
  // Build a two-level chain and keep the primary typed as LruNormalizedCache so
  // clearCurrentCache() (which clears only this level) is accessible.
  LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION);
  LruNormalizedCache primaryCache = (LruNormalizedCache) new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION)
      .chain(secondaryCacheFactory).createChain(basicFieldAdapter);

  Record record = Record.builder("key").build();
  primaryCache.merge(record, CacheHeaders.NONE);

  primaryCache.clearCurrentCache();

  // Clearing only the primary level must leave the secondary level intact.
  // (The original test asserted this twice verbatim; the duplicate added no coverage.)
  assertThat(primaryCache.nextCache().get()
      .loadRecord("key", CacheHeaders.NONE)).isNotNull();
}
@Test public void testDualCacheSingleRecord() { LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION); NormalizedCache primaryCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION) .chain(secondaryCacheFactory).createChain(basicFieldAdapter); Record.Builder recordBuilder = Record.builder("root"); recordBuilder.addField("bar", "bar"); final Record record = recordBuilder.build(); primaryCache.merge(record, CacheHeaders.NONE); //verify write through behavior assertThat(primaryCache.loadRecord("root", CacheHeaders.NONE).field("bar")).isEqualTo("bar"); assertThat(primaryCache.nextCache().get().loadRecord("root", CacheHeaders.NONE).field("bar")).isEqualTo("bar"); }
@Test public void testClearAll() {
  // Two-level chain, no eviction at either level.
  LruNormalizedCacheFactory backingFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION);
  NormalizedCache cacheChain = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION)
      .chain(backingFactory)
      .createChain(basicFieldAdapter);

  cacheChain.merge(Record.builder("key").build(), CacheHeaders.NONE);
  cacheChain.clearAll();

  // clearAll must purge every level of the chain, so the lookup misses.
  assertThat(cacheChain.loadRecord("key", CacheHeaders.NONE)).isNull();
}
@Test public void testClearSecondaryCache() {
  // Two-level chain, no eviction at either level.
  LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION);
  NormalizedCache primaryCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION)
      .chain(secondaryCacheFactory)
      .createChain(basicFieldAdapter);

  primaryCache.merge(Record.builder("key").build(), CacheHeaders.NONE);

  // Clear only the secondary level, then verify a direct read against it misses.
  NormalizedCache secondary = primaryCache.nextCache().get();
  secondary.clearAll();
  assertThat(secondary.loadRecord("key", CacheHeaders.NONE)).isNull();
}
/**
 * Loads each requested key via {@link NormalizedCache#loadRecord(String, CacheHeaders)}.
 * Implementations should override this method if the underlying storage technology can offer
 * an optimized manner to read multiple records.
 *
 * @param keys The set of {@link Record} keys to read.
 * @param cacheHeaders The cache headers associated with the request which generated this record.
 */
@Nonnull public Collection<Record> loadRecords(@Nonnull Collection<String> keys, @Nonnull CacheHeaders cacheHeaders) {
  final List<Record> result = new ArrayList<>(keys.size());
  for (String recordKey : keys) {
    final Record hit = loadRecord(recordKey, cacheHeaders);
    // Misses are skipped, so the result may be smaller than the key set.
    if (hit != null) {
      result.add(hit);
    }
  }
  return result;
}
// Tail of an anonymous-class expression whose opening is not visible in this chunk.
// Looks up {@code key} in the given cache and wraps the nullable loadRecord result;
// NOTE(review): Optional.fromNullable suggests a Guava-style Optional, not java.util — confirm import.
@Nonnull @Override public Optional<Record> apply(@Nonnull NormalizedCache cache) { return Optional.fromNullable(cache.loadRecord(key, cacheHeaders)); } });
/**
 * Delegates to {@link NormalizedCache#loadRecord(String, CacheHeaders)} once per key.
 * Implementations should override this method if the underlying storage technology can offer
 * an optimized manner to read multiple records.
 *
 * @param keys The set of {@link Record} keys to read.
 * @param cacheHeaders The cache headers associated with the request which generated this record.
 */
@Nonnull public Collection<Record> loadRecords(@Nonnull Collection<String> keys, @Nonnull CacheHeaders cacheHeaders) {
  final List<Record> loaded = new ArrayList<>(keys.size());
  for (String candidateKey : keys) {
    final Record entry = loadRecord(candidateKey, cacheHeaders);
    if (entry == null) {
      // No cached record for this key; leave it out of the result.
      continue;
    }
    loaded.add(entry);
  }
  return loaded;
}
// Tail of an anonymous-class expression whose opening is not visible in this chunk.
// Looks up {@code key} in the given cache and wraps the nullable loadRecord result;
// NOTE(review): Optional.fromNullable suggests a Guava-style Optional, not java.util — confirm import.
@Nonnull @Override public Optional<Record> apply(@Nonnull NormalizedCache cache) { return Optional.fromNullable(cache.loadRecord(key, cacheHeaders)); } });