/**
 * Removes the given {@code sagaIdentifier} from the cached association entry for the given
 * {@code associationValue} and {@code sagaType}, writing the updated set back so the cache
 * reflects the removal. Does nothing when no entry is cached for that key.
 */
private void removeAssociationValueFromCache(Class<?> sagaType, String sagaIdentifier, AssociationValue associationValue) {
    String cacheKey = cacheKey(associationValue, sagaType);
    Set<String> cachedIdentifiers = associationsCache.get(cacheKey);
    if (cachedIdentifiers == null) {
        return;
    }
    if (cachedIdentifiers.remove(sagaIdentifier)) {
        // Re-put so cache implementations that copy on write observe the mutation.
        associationsCache.put(cacheKey, cachedIdentifiers);
    }
}
/**
 * Creates a new aggregate instance via the delegate repository and registers it with the cache.
 * A rollback handler is attached to the current Unit of Work so an uncommitted aggregate is
 * evicted from the cache again when the transaction is rolled back.
 */
@Override
protected EventSourcedAggregate<T> doCreateNewForLock(Callable<T> factoryMethod) throws Exception {
    EventSourcedAggregate<T> created = super.doCreateNewForLock(factoryMethod);
    CurrentUnitOfWork.get().onRollback(uow -> cache.remove(created.identifierAsString()));
    cache.put(created.identifierAsString(), new AggregateCacheEntry<>(created));
    return created;
}
/**
 * Saves the aggregate through the delegate repository and then refreshes the cache entry with
 * the freshly saved state.
 */
@Override
protected void doSaveWithLock(EventSourcedAggregate<T> aggregate) {
    super.doSaveWithLock(aggregate);
    String aggregateId = aggregate.identifierAsString();
    cache.put(aggregateId, new AggregateCacheEntry<>(aggregate));
}
/**
 * Evicts the aggregate with the given identifier from every registered repository's
 * first-level cache as well as from the shared cache.
 *
 * @param aggregateIdentifier the identifier of the aggregate to evict
 */
private void removeEntry(String aggregateIdentifier) {
    // Wildcard instead of the raw type keeps the compiler's generic checks active.
    for (DisruptorRepository<?> repository : repositories.values()) {
        repository.removeFromCache(aggregateIdentifier);
    }
    cache.remove(aggregateIdentifier);
}
/**
 * Loads the aggregate with the given identifier, preferring the cache over the underlying
 * persistence logic. The necessary locks are assumed to be held by the caller.
 * <p>
 * When a cache entry exists, the aggregate is reconstructed from it; on a cache miss the
 * delegate repository performs a regular load. A cached aggregate that is marked deleted
 * results in an {@link AggregateDeletedException}.
 *
 * @param aggregateIdentifier the identifier of the aggregate to load
 * @param expectedVersion     the expected version of the aggregate
 * @return the fully initialized aggregate
 */
@Override
protected EventSourcedAggregate<T> doLoadWithLock(String aggregateIdentifier, Long expectedVersion) {
    AggregateCacheEntry<T> cacheEntry = cache.get(aggregateIdentifier);
    EventSourcedAggregate<T> aggregate = cacheEntry == null
            ? null
            : cacheEntry.recreateAggregate(aggregateModel(), eventStore, repositoryProvider, snapshotTriggerDefinition);
    if (aggregate == null) {
        return super.doLoadWithLock(aggregateIdentifier, expectedVersion);
    }
    if (aggregate.isDeleted()) {
        throw new AggregateDeletedException(aggregateIdentifier);
    }
    return aggregate;
}
}
/**
 * Inserts the saga via the delegate store, then primes the saga cache and the associations
 * cache with the new saga's state.
 */
@Override
public void insertSaga(Class<? extends T> sagaType, String sagaIdentifier, T saga, Set<AssociationValue> associationValues) {
    delegate.insertSaga(sagaType, sagaIdentifier, saga, associationValues);
    CacheEntry<T> cacheEntry = new CacheEntry<>(saga, associationValues);
    sagaCache.put(sagaIdentifier, cacheEntry);
    addCachedAssociations(associationValues, sagaIdentifier, sagaType);
}
/**
 * Deletes the saga from the delegate store after evicting it — and each of its association
 * values — from the caches.
 */
@Override
public void deleteSaga(Class<? extends T> sagaType, String sagaIdentifier, Set<AssociationValue> associationValues) {
    sagaCache.remove(sagaIdentifier);
    for (AssociationValue associationValue : associationValues) {
        removeAssociationValueFromCache(sagaType, sagaIdentifier, associationValue);
    }
    delegate.deleteSaga(sagaType, sagaIdentifier, associationValues);
}
/**
 * Loads the aggregate with the given identifier while the necessary locks are held. A cached
 * entry, when present, is used to reconstruct the aggregate; otherwise the underlying
 * persistence logic is invoked.
 * <p>
 * Loading a cached aggregate that is marked deleted raises an {@link AggregateDeletedException}.
 *
 * @param aggregateIdentifier the identifier of the aggregate to load
 * @param expectedVersion     the expected version of the aggregate
 * @return the fully initialized aggregate
 */
@Override
protected EventSourcedAggregate<T> doLoadWithLock(String aggregateIdentifier, Long expectedVersion) {
    AggregateCacheEntry<T> cacheEntry = cache.get(aggregateIdentifier);
    EventSourcedAggregate<T> aggregate = null;
    if (cacheEntry != null) {
        aggregate = cacheEntry.recreateAggregate(aggregateModel(), eventStore, repositoryProvider, snapshotTriggerDefinition);
    }
    if (aggregate == null) {
        return super.doLoadWithLock(aggregateIdentifier, expectedVersion);
    }
    if (aggregate.isDeleted()) {
        throw new AggregateDeletedException(aggregateIdentifier);
    }
    return aggregate;
}
/**
 * Registers the associations of a saga with given {@code sagaIdentifier} and given
 * {@code sagaType} with the associations cache. Only keys that already have a cached entry are
 * updated; missing entries are left to be populated lazily by a lookup.
 *
 * @param associationValues the association values of the saga
 * @param sagaIdentifier    the identifier of the saga
 * @param sagaType          the type of the saga
 */
protected void addCachedAssociations(Iterable<AssociationValue> associationValues, String sagaIdentifier, Class<?> sagaType) {
    associationValues.forEach(associationValue -> {
        String key = cacheKey(associationValue, sagaType);
        Set<String> identifiers = associationsCache.get(key);
        if (identifiers != null && identifiers.add(sagaIdentifier)) {
            // Re-put so cache implementations that copy on write observe the mutation.
            associationsCache.put(key, identifiers);
        }
    });
}
/**
 * Marks the aggregate as deleted through the delegate repository and stores the (now deleted)
 * state in the cache, so that subsequent cached loads observe the deletion.
 */
@Override
protected void doDeleteWithLock(EventSourcedAggregate<T> aggregate) {
    super.doDeleteWithLock(aggregate);
    String aggregateId = aggregate.identifierAsString();
    cache.put(aggregateId, new AggregateCacheEntry<>(aggregate));
}
/**
 * Delegates creation of a new aggregate instance, then caches it. Should the current Unit of
 * Work roll back, the registered handler evicts the entry so the cache never serves an
 * aggregate whose creation was never committed.
 */
@Override
protected EventSourcedAggregate<T> doCreateNewForLock(Callable<T> factoryMethod) throws Exception {
    EventSourcedAggregate<T> newAggregate = super.doCreateNewForLock(factoryMethod);
    CurrentUnitOfWork.get()
                     .onRollback(unitOfWork -> cache.remove(newAggregate.identifierAsString()));
    cache.put(newAggregate.identifierAsString(), new AggregateCacheEntry<>(newAggregate));
    return newAggregate;
}
/**
 * Validates the loaded aggregate, first attaching a rollback handler to the current Unit of
 * Work so the cache entry is evicted if the transaction fails — preventing a stale entry from
 * surviving a rolled-back load.
 */
@Override
protected void validateOnLoad(Aggregate<T> aggregate, Long expectedVersion) {
    CurrentUnitOfWork.get()
                     .onRollback(unitOfWork -> cache.remove(aggregate.identifierAsString()));
    super.validateOnLoad(aggregate, expectedVersion);
}
/**
 * Performs the actual load of an aggregate under the locks already obtained by the caller.
 * The cache is consulted first; a hit is rehydrated via the cache entry, a miss falls back to
 * the underlying persistence logic.
 *
 * @param aggregateIdentifier the identifier of the aggregate to load
 * @param expectedVersion     the expected version of the aggregate
 * @return the fully initialized aggregate
 * @throws AggregateDeletedException when the cached aggregate is marked as deleted
 */
@Override
protected EventSourcedAggregate<T> doLoadWithLock(String aggregateIdentifier, Long expectedVersion) {
    AggregateCacheEntry<T> cacheEntry = cache.get(aggregateIdentifier);
    EventSourcedAggregate<T> aggregate = cacheEntry != null
            ? cacheEntry.recreateAggregate(aggregateModel(), eventStore, repositoryProvider, snapshotTriggerDefinition)
            : null;
    if (aggregate == null) {
        return super.doLoadWithLock(aggregateIdentifier, expectedVersion);
    }
    if (aggregate.isDeleted()) {
        throw new AggregateDeletedException(aggregateIdentifier);
    }
    return aggregate;
}
}
@Override public Set<String> findSagas(Class<? extends T> sagaType, AssociationValue associationValue) { final String key = cacheKey(associationValue, sagaType); // this is a dirty read, but a cache should be thread safe anyway Set<String> associations = associationsCache.get(key); if (associations == null) { associations = delegate.findSagas(sagaType, associationValue); associationsCache.put(key, associations); } return new HashSet<>(associations); }
/**
 * Updates the saga in the delegate store and synchronizes the caches: the saga cache receives
 * the new state, removed associations are evicted and newly added associations are registered.
 */
@Override
public void updateSaga(Class<? extends T> sagaType, String sagaIdentifier, T saga, AssociationValues associationValues) {
    sagaCache.put(sagaIdentifier, new CacheEntry<>(saga, associationValues.asSet()));
    delegate.updateSaga(sagaType, sagaIdentifier, saga, associationValues);
    for (AssociationValue removed : associationValues.removedAssociations()) {
        removeAssociationValueFromCache(sagaType, sagaIdentifier, removed);
    }
    addCachedAssociations(associationValues.addedAssociations(), sagaIdentifier, sagaType);
}
/**
 * Creates a new aggregate through the superclass, caches it immediately and schedules eviction
 * of that cache entry for the case where the current Unit of Work is rolled back.
 */
@Override
protected EventSourcedAggregate<T> doCreateNewForLock(Callable<T> factoryMethod) throws Exception {
    EventSourcedAggregate<T> result = super.doCreateNewForLock(factoryMethod);
    CurrentUnitOfWork.get().onRollback(u -> cache.remove(result.identifierAsString()));
    cache.put(result.identifierAsString(), new AggregateCacheEntry<>(result));
    return result;
}
/**
 * Removes the aggregate with the given identifier from each registered repository's
 * first-level cache and from the shared cache.
 *
 * @param aggregateIdentifier the identifier of the aggregate to evict
 */
private void removeEntry(String aggregateIdentifier) {
    // Wildcard instead of the raw type keeps the compiler's generic checks active.
    for (DisruptorRepository<?> repository : repositories.values()) {
        repository.removeFromCache(aggregateIdentifier);
    }
    cache.remove(aggregateIdentifier);
}
/**
 * Loads the saga with the given identifier, returning the cached entry when present.
 * On a cache miss the backing store is consulted and a non-null result is cached for
 * subsequent loads.
 */
@Override
public <S extends T> Entry<S> loadSaga(Class<S> sagaType, String sagaIdentifier) {
    Entry<S> cached = sagaCache.get(sagaIdentifier);
    if (cached != null) {
        return cached;
    }
    Entry<S> loaded = delegate.loadSaga(sagaType, sagaIdentifier);
    if (loaded != null) {
        sagaCache.put(sagaIdentifier, new CacheEntry<T>(loaded));
    }
    return loaded;
}
/**
 * Instantiates a new aggregate using the given factory method, wires up snapshot triggering
 * and registers the fresh instance in both the first-level cache and the shared cache.
 */
@Override
public Aggregate<T> newInstance(Callable<T> factoryMethod) throws Exception {
    SnapshotTrigger trigger = snapshotTriggerDefinition.prepareTrigger(aggregateFactory.getAggregateType());
    EventSourcedAggregate<T> aggregate =
            EventSourcedAggregate.initialize(factoryMethod, model, eventStore, repositoryProvider, trigger);
    String aggregateId = aggregate.identifierAsString();
    firstLevelCache.put(aggregateId, aggregate);
    cache.put(aggregateId, new AggregateCacheEntry<>(aggregate));
    return aggregate;
}
/**
 * Evicts the aggregate identified by {@code aggregateIdentifier} from all registered
 * repositories' first-level caches and from the shared cache.
 *
 * @param aggregateIdentifier the identifier of the aggregate to evict
 */
private void removeEntry(String aggregateIdentifier) {
    // Wildcard instead of the raw type keeps the compiler's generic checks active.
    for (DisruptorRepository<?> repository : repositories.values()) {
        repository.removeFromCache(aggregateIdentifier);
    }
    cache.remove(aggregateIdentifier);
}