/**
 * Builds the store: wraps the provided cache in an {@link OptimisticNormalizedCache}
 * layer and captures the collaborators used for normalization, dispatch and logging.
 */
public RealApolloStore(@NotNull NormalizedCache normalizedCache,
    @NotNull CacheKeyResolver cacheKeyResolver,
    @NotNull final ScalarTypeAdapters scalarTypeAdapters,
    @NotNull Executor dispatcher,
    @NotNull ApolloLogger logger) {
  checkNotNull(normalizedCache, "cacheStore == null");
  this.cacheKeyResolver = checkNotNull(cacheKeyResolver, "cacheKeyResolver == null");
  this.scalarTypeAdapters = checkNotNull(scalarTypeAdapters, "scalarTypeAdapters == null");
  this.dispatcher = checkNotNull(dispatcher, "dispatcher == null");
  this.logger = checkNotNull(logger, "logger == null");
  // The optimistic layer sits in front of the caller-supplied cache.
  this.optimisticCache = (OptimisticNormalizedCache) new OptimisticNormalizedCache().chain(normalizedCache);
  this.lock = new ReentrantReadWriteLock();
  // Weak set: subscribers are released automatically once no longer referenced elsewhere.
  this.subscribers = Collections.newSetFromMap(new WeakHashMap<RecordChangeSubscriber, Boolean>());
  this.cacheKeyBuilder = new RealCacheKeyBuilder();
}
/**
 * Reads a single normalized record by cache key, or {@code null} on a miss.
 */
@Override @Nullable public Record read(@NotNull String key, @NotNull CacheHeaders cacheHeaders) {
  checkNotNull(key, "key == null");
  return optimisticCache.loadRecord(key, cacheHeaders);
}
/**
 * Reads the normalized records for a batch of cache keys.
 */
@Override @NotNull public Collection<Record> read(@NotNull Collection<String> keys, @NotNull CacheHeaders cacheHeaders) {
  checkNotNull(keys, "keys == null");
  return optimisticCache.loadRecords(keys, cacheHeaders);
}
// NOTE(review): this is the tail of an anonymous class whose opening statement is
// outside this view — the trailing `} });` closes that enclosing construct.
@Override public Set<String> execute(WriteableStore cache) {
  // Marshal the operation data into a response writer, then normalize it into records.
  RealResponseWriter responseWriter = new RealResponseWriter(operation.variables(), scalarTypeAdapters);
  operationData.marshaller().marshal(responseWriter);
  ResponseNormalizer<Map<String, Object>> responseNormalizer = networkResponseNormalizer();
  responseNormalizer.willResolveRootQuery(operation);
  responseWriter.resolveFields(responseNormalizer);
  if (optimistic) {
    // Tag each record with the mutation id so this optimistic write can be rolled back later.
    List<Record> updatedRecords = new ArrayList<>();
    for (Record record : responseNormalizer.records()) {
      updatedRecords.add(record.toBuilder().mutationId(mutationId).build());
    }
    return optimisticCache.mergeOptimisticUpdates(updatedRecords);
  } else {
    // Non-optimistic path: merge directly into the real cache with no headers.
    return optimisticCache.merge(responseNormalizer.records(), CacheHeaders.NONE);
  }
} });
/**
 * Removes the record for {@code cacheKey} from this optimistic layer and the wrapped
 * cache; when {@code cascade} is set, also removes every record it references.
 *
 * @return true if any layer removed a record
 */
@Override public boolean remove(@NotNull final CacheKey cacheKey, final boolean cascade) {
  checkNotNull(cacheKey, "cacheKey == null");
  // Delegate removal to the wrapped cache first.
  boolean removed = nextCache().map(new Function<NormalizedCache, Boolean>() {
    @NotNull @Override public Boolean apply(@NotNull NormalizedCache cache) {
      return cache.remove(cacheKey, cascade);
    }
  }).or(Boolean.FALSE);
  // Then drop any optimistic journal this layer holds for the same key.
  RecordJournal journal = lruCache.getIfPresent(cacheKey.key());
  if (journal == null) {
    return removed;
  }
  lruCache.invalidate(cacheKey.key());
  removed = true;
  if (cascade) {
    for (CacheReference reference : journal.snapshot.referencedFields()) {
      // Non-short-circuit '&' on purpose: every referenced record must be visited
      // even after one removal reports false.
      removed = removed & remove(CacheKey.from(reference.key()), true);
    }
  }
  return removed;
}
/**
 * Dumps the cache contents layer by layer, keyed by each layer's class: this layer's
 * optimistic snapshots first, then everything from the chained caches below.
 */
@Override public Map<Class, Map<String, Record>> dump() {
  // Snapshot the optimistic journals held by this layer.
  Map<String, Record> snapshot = new LinkedHashMap<>();
  for (Map.Entry<String, RecordJournal> entry : lruCache.asMap().entrySet()) {
    snapshot.put(entry.getKey(), entry.getValue().snapshot);
  }
  Map<Class, Map<String, Record>> result = new LinkedHashMap<>();
  result.put(this.getClass(), Collections.unmodifiableMap(snapshot));
  // Append the dump of every cache below this one in the chain.
  if (nextCache().isPresent()) {
    result.putAll(nextCache().get().dump());
  }
  return result;
}
/**
 * Merges a batch of records into the optimistic cache.
 *
 * @return the set of cache keys whose records changed
 */
@Override @NotNull public Set<String> merge(@NotNull Collection<Record> recordSet, @NotNull CacheHeaders cacheHeaders) {
  checkNotNull(recordSet, "recordSet == null");
  return optimisticCache.merge(recordSet, cacheHeaders);
}
/**
 * Applies each optimistic record individually and returns the union of all
 * dependent cache keys touched by the updates.
 */
@NotNull public Set<String> mergeOptimisticUpdates(@NotNull Collection<Record> recordSet) {
  Set<String> dependentKeys = new LinkedHashSet<>();
  for (Record record : recordSet) {
    Set<String> changed = mergeOptimisticUpdate(record);
    dependentKeys.addAll(changed);
  }
  return dependentKeys;
}
// NOTE(review): this is the tail of an anonymous class whose opening statement is
// outside this view — the trailing `} });` closes that enclosing construct.
@Override public Set<String> execute(WriteableStore cache) {
  // Marshal the operation data and normalize it into cache records.
  CacheResponseWriter cacheResponseWriter = new CacheResponseWriter(operation.variables(), scalarTypeAdapters);
  operationData.marshaller().marshal(cacheResponseWriter);
  ResponseNormalizer<Map<String, Object>> responseNormalizer = networkResponseNormalizer();
  responseNormalizer.willResolveRootQuery(operation);
  Collection<Record> records = cacheResponseWriter.normalize(responseNormalizer);
  if (optimistic) {
    // Tag each record with the mutation id so this optimistic write can be rolled back later.
    List<Record> updatedRecords = new ArrayList<>();
    for (Record record : records) {
      updatedRecords.add(record.toBuilder().mutationId(mutationId).build());
    }
    return optimisticCache.mergeOptimisticUpdates(updatedRecords);
  } else {
    // Non-optimistic path: merge directly into the real cache with no headers.
    return optimisticCache.merge(records, CacheHeaders.NONE);
  }
} });
// Clears this optimistic layer, then propagates the clear to the wrapped cache (if any).
@SuppressWarnings("ResultOfMethodCallIgnored") @Override public void clearAll() {
  // Drop every optimistic journal held locally.
  lruCache.invalidateAll();
  //noinspection ResultOfMethodCallIgnored
  nextCache().apply(new Action<NormalizedCache>() {
    @Override public void apply(@NotNull NormalizedCache cache) {
      cache.clearAll();
    }
  });
}
/**
 * Merges a single record into the optimistic cache.
 *
 * <p>Consistency fix: annotated {@code record} and the return with {@code @NotNull} to
 * match the sibling overloads ({@code read}, {@code merge(Collection, ...)}); the
 * argument was already guarded by {@code checkNotNull}, so the contract is unchanged.
 *
 * @return the set of cache keys whose records changed
 */
@Override @NotNull public Set<String> merge(@NotNull Record record, @NotNull CacheHeaders cacheHeaders) {
  checkNotNull(record, "record == null");
  return optimisticCache.merge(record, cacheHeaders);
}
/**
 * Folds each optimistic record into the cache one at a time, accumulating every
 * dependent cache key the updates touched.
 */
@Nonnull public Set<String> mergeOptimisticUpdates(@Nonnull Collection<Record> recordSet) {
  Set<String> touchedKeys = new LinkedHashSet<>();
  for (Record pending : recordSet) {
    touchedKeys.addAll(mergeOptimisticUpdate(pending));
  }
  return touchedKeys;
}
/**
 * Builds the store: wraps the provided cache in an {@link OptimisticNormalizedCache}
 * layer and captures the collaborators used for normalization, dispatch and logging.
 */
public RealAppSyncStore(@Nonnull NormalizedCache normalizedCache,
    @Nonnull CacheKeyResolver cacheKeyResolver,
    @Nonnull final ScalarTypeAdapters scalarTypeAdapters,
    @Nonnull Executor dispatcher,
    @Nonnull ApolloLogger logger) {
  checkNotNull(normalizedCache, "cacheStore == null");
  this.cacheKeyResolver = checkNotNull(cacheKeyResolver, "cacheKeyResolver == null");
  this.scalarTypeAdapters = checkNotNull(scalarTypeAdapters, "scalarTypeAdapters == null");
  this.dispatcher = checkNotNull(dispatcher, "dispatcher == null");
  this.logger = checkNotNull(logger, "logger == null");
  // The optimistic layer sits in front of the caller-supplied cache.
  this.optimisticCache = (OptimisticNormalizedCache) new OptimisticNormalizedCache().chain(normalizedCache);
  this.lock = new ReentrantReadWriteLock();
  // Weak set: subscribers are released automatically once no longer referenced elsewhere.
  this.subscribers = Collections.newSetFromMap(new WeakHashMap<RecordChangeSubscriber, Boolean>());
}
// NOTE(review): this is the tail of an anonymous class whose opening statement is
// outside this view — the trailing `} });` closes that enclosing construct.
@Override public Set<String> execute(WriteableStore cache) {
  // Marshal the operation data and normalize it into cache records.
  CacheResponseWriter cacheResponseWriter = new CacheResponseWriter(operation.variables(), scalarTypeAdapters);
  operationData.marshaller().marshal(cacheResponseWriter);
  ResponseNormalizer<Map<String, Object>> responseNormalizer = networkResponseNormalizer();
  responseNormalizer.willResolveRootQuery(operation);
  Collection<Record> records = cacheResponseWriter.normalize(responseNormalizer);
  if (optimistic) {
    // Tag each record with the mutation id so this optimistic write can be rolled back later.
    List<Record> updatedRecords = new ArrayList<>();
    for (Record record : records) {
      updatedRecords.add(record.toBuilder().mutationId(mutationId).build());
    }
    return optimisticCache.mergeOptimisticUpdates(updatedRecords);
  } else {
    // Non-optimistic path: merge directly into the real cache with no headers.
    return optimisticCache.merge(records, CacheHeaders.NONE);
  }
} });
/**
 * Loads a record, overlaying any pending optimistic changes held in this layer's
 * journal on top of the record from the wrapped cache.
 *
 * Returns the merged record, the journal snapshot alone if the wrapped cache has no
 * entry, the wrapped cache's record alone if there is no journal, or null on a miss.
 */
@Nullable @Override public Record loadRecord(@NotNull final String key, @NotNull final CacheHeaders cacheHeaders) {
  checkNotNull(key, "key == null");
  checkNotNull(cacheHeaders, "cacheHeaders == null");
  try {
    // Fetch the committed record from the cache below this layer, if present.
    final Optional<Record> nonOptimisticRecord = nextCache()
        .flatMap(new Function<NormalizedCache, Optional<Record>>() {
          @NotNull @Override public Optional<Record> apply(@NotNull NormalizedCache cache) {
            return Optional.fromNullable(cache.loadRecord(key, cacheHeaders));
          }
        });
    final RecordJournal journal = lruCache.getIfPresent(key);
    if (journal != null) {
      // Clone before merging so the committed record is never mutated in place.
      return nonOptimisticRecord.map(new Function<Record, Record>() {
        @NotNull @Override public Record apply(@NotNull Record record) {
          Record result = record.clone();
          result.mergeWith(journal.snapshot);
          return result;
        }
        // No committed record: serve a clone of the optimistic snapshot alone.
      }).or(journal.snapshot.clone());
    } else {
      return nonOptimisticRecord.orNull();
    }
  } catch (Exception ignore) {
    // Deliberate best-effort: any failure reads as a cache miss rather than
    // propagating to the caller.
    return null;
  }
}
/**
 * Merges a single record into the optimistic cache.
 *
 * <p>Consistency fix: annotated {@code record} and the return with {@code @Nonnull} to
 * match the sibling methods ({@code read}, {@code mergeOptimisticUpdates}); the
 * argument was already guarded by {@code checkNotNull}, so the contract is unchanged.
 *
 * @return the set of cache keys whose records changed
 */
@Override @Nonnull public Set<String> merge(@Nonnull Record record, @Nonnull CacheHeaders cacheHeaders) {
  checkNotNull(record, "record == null");
  return optimisticCache.merge(record, cacheHeaders);
}
/**
 * Merges every optimistic record in the batch, one record at a time, and returns
 * the union of dependent cache keys reported by the per-record merges.
 */
@Nonnull public Set<String> mergeOptimisticUpdates(@Nonnull Collection<Record> recordSet) {
  Set<String> allKeys = new LinkedHashSet<>();
  for (Record next : recordSet) {
    Set<String> keysForRecord = mergeOptimisticUpdate(next);
    allKeys.addAll(keysForRecord);
  }
  return allKeys;
}
/**
 * Reads a single normalized record by cache key, or {@code null} on a miss.
 */
@Nullable public Record read(@Nonnull String key, @Nonnull CacheHeaders cacheHeaders) {
  checkNotNull(key, "key == null");
  return optimisticCache.loadRecord(key, cacheHeaders);
}
/**
 * Reads the normalized records for a batch of cache keys.
 */
@Nonnull public Collection<Record> read(@Nonnull Collection<String> keys, @Nonnull CacheHeaders cacheHeaders) {
  checkNotNull(keys, "keys == null");
  return optimisticCache.loadRecords(keys, cacheHeaders);
}
/**
 * Builds the store: chains an optimistic layer in front of the supplied cache and
 * captures the collaborators used for normalization, dispatch and logging.
 * All arguments must be non-null; each is validated eagerly.
 */
public RealAppSyncStore(@Nonnull NormalizedCache normalizedCache, @Nonnull CacheKeyResolver cacheKeyResolver, @Nonnull final ScalarTypeAdapters scalarTypeAdapters, @Nonnull Executor dispatcher, @Nonnull ApolloLogger logger) {
  checkNotNull(normalizedCache, "cacheStore == null");
  // The optimistic layer sits in front of the caller-supplied cache.
  this.optimisticCache = (OptimisticNormalizedCache) new OptimisticNormalizedCache().chain(normalizedCache);
  this.cacheKeyResolver = checkNotNull(cacheKeyResolver, "cacheKeyResolver == null");
  this.scalarTypeAdapters = checkNotNull(scalarTypeAdapters, "scalarTypeAdapters == null");
  this.dispatcher = checkNotNull(dispatcher, "dispatcher == null");
  this.logger = checkNotNull(logger, "logger == null");
  this.lock = new ReentrantReadWriteLock();
  // Weak set: subscribers are released automatically once no longer referenced elsewhere.
  this.subscribers = Collections.newSetFromMap(new WeakHashMap<RecordChangeSubscriber, Boolean>());
}