void willResolveRecord(CacheKey cacheKey) {
  // Reset all per-record resolution state before a new root record is resolved.
  currentRecordBuilder = Record.builder(cacheKey.key());
  recordSet = new RecordSet();
  pathStack = new SimpleStack<>();
  recordStack = new SimpleStack<>();
  valueStack = new SimpleStack<>();
  dependentKeys = new HashSet<>();
  path = new ArrayList<>();
}
/**
 * Estimates the in-memory footprint of {@code record}: a fixed per-record overhead
 * plus the encoded byte length of the record key and of every field key, plus the
 * weighed size of every field value.
 */
public static int calculateBytes(Record record) {
  final Charset charset = Charset.defaultCharset();
  int total = SIZE_OF_RECORD_OVERHEAD + record.key().getBytes(charset).length;
  for (Map.Entry<String, Object> entry : record.fields().entrySet()) {
    total += entry.getKey().getBytes(charset).length;
    total += weighField(entry.getValue());
  }
  return total;
}
/**
 * Reads the cached value stored on {@code record} for {@code field}, keyed by the
 * field's cache key built from the operation variables. The raw value is cast
 * unchecked to the caller's expected type.
 *
 * @throws NullPointerException if the record holds no entry for the field's cache key
 */
@SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"})
private <T> T fieldValue(Record record, ResponseField field) {
  final String fieldKey = cacheKeyBuilder.build(field, variables);
  if (record.hasField(fieldKey)) {
    return (T) record.field(fieldKey);
  }
  throw new NullPointerException("Missing value: " + field.fieldName());
}
}
@NotNull protected Set<String> performMerge(@NotNull final Record apolloRecord, @NotNull final CacheHeaders cacheHeaders) { final Record oldRecord = lruCache.getIfPresent(apolloRecord.key()); if (oldRecord == null) { lruCache.put(apolloRecord.key(), apolloRecord); return apolloRecord.keys(); } else { Set<String> changedKeys = oldRecord.mergeWith(apolloRecord); //re-insert to trigger new weight calculation lruCache.put(apolloRecord.key(), oldRecord); return changedKeys; } }
/**
 * Looks up the record written by {@code mutationId}; if found, removes it from the
 * history and rebuilds the snapshot record. The snapshot record is the superposition
 * (merge) of all record versions remaining in the history.
 *
 * @return the set of field keys affected by the revert, or an empty set when no
 *     history entry matches the mutation id
 */
Set<String> revert(UUID mutationId) {
  // Locate the history entry produced by this mutation.
  int recordIndex = -1;
  for (int i = 0; i < history.size(); i++) {
    if (mutationId.equals(history.get(i).mutationId())) {
      recordIndex = i;
      break;
    }
  }
  if (recordIndex == -1) {
    // This mutation never touched the history: nothing changes.
    return Collections.emptySet();
  }
  Set<String> changedKeys = new HashSet<>();
  // The removed version's key is always reported as changed.
  changedKeys.add(history.remove(recordIndex).key());
  // Replay history from the version just before the removed one: the first replayed
  // record becomes the new snapshot base, and each later version is merged on top,
  // accumulating the keys it changes.
  // NOTE(review): when recordIndex == 0 the base is the record now occupying index 0
  // (the next-newer version) and its own keys are not added to changedKeys — confirm
  // this is the intended semantics for reverting the oldest history entry.
  for (int i = Math.max(0, recordIndex - 1); i < history.size(); i++) {
    Record record = history.get(i);
    if (i == Math.max(0, recordIndex - 1)) {
      snapshot = record.clone();
    } else {
      changedKeys.addAll(snapshot.mergeWith(record));
    }
  }
  return changedKeys;
}
}
/**
 * @return A set of all field keys. A field key incorporates any GraphQL arguments in addition to the field name.
 */
public Set<String> keys() {
  final Set<String> fieldKeys = new HashSet<>();
  for (String fieldName : fields.keySet()) {
    fieldKeys.add(key() + "." + fieldName);
  }
  return fieldKeys;
}
@Nonnull public Set<String> merge(@Nonnull final Record apolloRecord, @Nonnull final CacheHeaders cacheHeaders) { if (cacheHeaders.hasHeader(DO_NOT_STORE)) { return Collections.emptySet(); } //noinspection ResultOfMethodCallIgnored Optional<NormalizedCache> normalizedCacheOptional = nextCache().apply(new Action<NormalizedCache>() { @Override public void apply(@Nonnull NormalizedCache cache) { cache.merge(apolloRecord, cacheHeaders); } }); Optional<Record> optionalOldRecord = selectRecordForKey(apolloRecord.key()); Set<String> changedKeys; if (!optionalOldRecord.isPresent()) { createRecord(apolloRecord.key(), recordFieldAdapter.toJson(apolloRecord.fields())); changedKeys = Collections.emptySet(); } else { Record oldRecord = optionalOldRecord.get(); changedKeys = oldRecord.mergeWith(apolloRecord); if (!changedKeys.isEmpty()) { updateRecord(oldRecord.key(), recordFieldAdapter.toJson(oldRecord.fields())); } } return changedKeys; }
@Override public void didResolveObject(ResponseField field, Optional<R> objectSource) {
  path = pathStack.pop();
  if (objectSource.isPresent()) {
    // A concrete object was resolved: finish its record, reference it from the
    // parent via its cache key, and fold it into the accumulated record set.
    final Record resolved = currentRecordBuilder.build();
    valueStack.push(new CacheReference(resolved.key()));
    dependentKeys.add(resolved.key());
    recordSet.merge(resolved);
  }
  // Restore the enclosing record's builder from the stack.
  currentRecordBuilder = recordStack.pop().toBuilder();
}
/**
 * @param otherRecord The record to merge into this record.
 * @return A set of field keys which have changed, or were added. A field key incorporates any GraphQL arguments in
 * addition to the field name.
 */
public Set<String> mergeWith(Record otherRecord) {
  final Set<String> changedKeys = new HashSet<>();
  for (Map.Entry<String, Object> entry : otherRecord.fields.entrySet()) {
    final String fieldName = entry.getKey();
    final Object newValue = entry.getValue();
    final boolean absent = !this.fields.containsKey(fieldName);
    final Object oldValue = this.fields.get(fieldName);
    // A field counts as changed when it was missing, or its value differs
    // (null-safe comparison).
    final boolean differs = (oldValue == null) ? newValue != null : !oldValue.equals(newValue);
    if (absent || differs) {
      this.fields.put(fieldName, newValue);
      changedKeys.add(key() + "." + fieldName);
      adjustSizeEstimate(newValue, oldValue);
    }
  }
  // Record which mutation produced this version.
  mutationId = otherRecord.mutationId;
  return changedKeys;
}
private void assertTestRecordPresentAndAccurate(Record testRecord, NormalizedCache store) {
  // Load the record back from the store and check key and both fields round-trip.
  final Record loaded = store.loadRecord(testRecord.key(), CacheHeaders.NONE);
  assertThat(loaded.key()).isEqualTo(testRecord.key());
  assertThat(loaded.field("a")).isEqualTo(testRecord.field("a"));
  assertThat(loaded.field("b")).isEqualTo(testRecord.field("b"));
}
@Test public void testDualCacheMultipleRecord() { LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION); NormalizedCache primaryCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION) .chain(secondaryCacheFactory).createChain(basicFieldAdapter); Record.Builder recordBuilder = Record.builder("root1"); recordBuilder.addField("bar", "bar"); final Record record1 = recordBuilder.build(); recordBuilder = Record.builder("root2"); recordBuilder.addField("bar", "bar"); final Record record2 = recordBuilder.build(); recordBuilder = Record.builder("root3"); recordBuilder.addField("bar", "bar"); final Record record3 = recordBuilder.build(); Collection<Record> records = Arrays.asList(record1, record2, record3); Collection<String> keys = Arrays.asList(record1.key(), record2.key(), record3.key()); primaryCache.merge(records, CacheHeaders.NONE); assertThat(primaryCache.loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); //verify write through behavior assertThat(primaryCache.loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); assertThat(primaryCache.nextCache().get() .loadRecords(keys, CacheHeaders.NONE).size()).isEqualTo(3); }
// Round-trips a record's fields through the JSON adapter and checks deserialized values.
// NOTE(review): expectedBigDecimal / expectedStringValue are never added to the builder
// via addField(...), so the built record has no fields and the "bigDecimal" assertion
// below looks like it cannot pass — confirm whether addField calls were lost. This
// block also appears truncated in this view (the method body is not closed here).
@Test public void testFieldsAdapterSerializationDeserialization() throws IOException {
  Record.Builder recordBuilder = Record.builder("root");
  BigDecimal expectedBigDecimal = new BigDecimal(1.23);
  String expectedStringValue = "StringValue";
  Record record = recordBuilder.build();
  String json = recordFieldAdapter.toJson(record.fields());
  Map<String, Object> deserializedMap = recordFieldAdapter.from(json);
  assertThat(deserializedMap.get("bigDecimal")).isEqualTo(expectedBigDecimal);
@Test public void testRecordWeigher() { Record.Builder recordBuilder = Record.builder("root"); BigDecimal expectedBigDecimal = new BigDecimal(1.23); String expectedStringValue = "StringValue"; Boolean expectedBooleanValue = true; CacheReference expectedCacheReference = new CacheReference("foo"); List<CacheReference> expectedCacheReferenceList = Arrays.asList(new CacheReference("bar"), new CacheReference ("baz")); List<Object> expectedScalarList = Arrays.<Object>asList("scalarOne", "scalarTwo"); recordBuilder.addField("bigDecimal", expectedBigDecimal); recordBuilder.addField("string", expectedStringValue); recordBuilder.addField("boolean", expectedBooleanValue); recordBuilder.addField("cacheReference", expectedCacheReference); recordBuilder.addField("scalarList", expectedScalarList); recordBuilder.addField("referenceList", expectedCacheReferenceList); Record record = recordBuilder.build(); record.sizeEstimateBytes(); //It's difficult to say what the "right" size estimate is, so just checking it is has been calculate at all. assertThat(record.sizeEstimateBytes()).isNotEqualTo(-1); } }
@Test public void testDualCacheSingleRecord() { LruNormalizedCacheFactory secondaryCacheFactory = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION); NormalizedCache primaryCache = new LruNormalizedCacheFactory(EvictionPolicy.NO_EVICTION) .chain(secondaryCacheFactory).createChain(basicFieldAdapter); Record.Builder recordBuilder = Record.builder("root"); recordBuilder.addField("bar", "bar"); final Record record = recordBuilder.build(); primaryCache.merge(record, CacheHeaders.NONE); //verify write through behavior assertThat(primaryCache.loadRecord("root", CacheHeaders.NONE).field("bar")).isEqualTo("bar"); assertThat(primaryCache.nextCache().get().loadRecord("root", CacheHeaders.NONE).field("bar")).isEqualTo("bar"); }
@Override public int weigh(String key, Record value) {
  // Weight = encoded byte length of the cache key plus the record's size estimate.
  final int keyBytes = key.getBytes(Charset.defaultCharset()).length;
  return keyBytes + value.sizeEstimateBytes();
}
});
.append(recordEntry.getKey()) .append("\" : {"); for (Map.Entry<String, Object> fieldEntry : recordEntry.getValue().fields().entrySet()) { builder .append("\n \"")
/**
 * Builds a {@link Record} from the key, fields and mutation id accumulated on this builder.
 */
public Record build() {
  return new Record(key, fields, mutationId);
}
}