@Override public boolean hasNext() { // check if there is anything to read if (!delegate.hasNext()) { return false; } DomainEventMessage<?> peeked = delegate.peek(); while (!filter.test(peeked)) { // consume delegate.next(); if (delegate.hasNext()) { peeked = delegate.peek(); } else { return false; } } return delegate.hasNext(); }
/**
 * Returns the highest known sequence number for the aggregate with the given identifier, or an empty
 * Optional when no events of that aggregate are present.
 * <p>
 * NOTE(review): this default implementation streams ALL events of the aggregate to find the maximum;
 * storage engines may be able to answer this with a cheaper dedicated query — confirm before relying
 * on it for hot paths.
 *
 * @param aggregateIdentifier the identifier of the aggregate to find the last sequence number for
 * @return an Optional with the last sequence number, or empty if the aggregate is unknown
 */
@Override
default Optional<Long> lastSequenceNumberFor(String aggregateIdentifier) {
    return readEvents(aggregateIdentifier).asStream().map(DomainEventMessage::getSequenceNumber)
            .max(Long::compareTo);
}
}
/**
 * Returns a DomainEventStream that holds no events at all.
 *
 * @return an empty DomainEventStream
 */
static DomainEventStream empty() {
    // Delegating to the varargs factory with zero arguments yields an empty stream.
    return DomainEventStream.of();
}
/**
 * Opens a stream of all domain events of the given {@code aggregateIdentifier} whose sequence number
 * is greater than or equal to {@code firstSequenceNumber}.
 * <p>
 * The returned stream is <em>finite</em>, ending with the last known event of the aggregate. If the
 * event store holds no events of the given aggregate an empty stream is returned.
 * <p>
 * This default implementation reads the whole stream via {@link #readEvents(String)} and filters out
 * events that precede {@code firstSequenceNumber}.
 *
 * @param aggregateIdentifier the identifier of the aggregate whose events to fetch
 * @param firstSequenceNumber the expected sequence number of the first event in the returned stream
 * @return a stream of all currently stored events of the aggregate
 */
default DomainEventStream readEvents(String aggregateIdentifier, long firstSequenceNumber) {
    DomainEventStream fullStream = readEvents(aggregateIdentifier);
    Stream<? extends DomainEventMessage<?>> filtered =
            fullStream.asStream().filter(event -> event.getSequenceNumber() >= firstSequenceNumber);
    // Keep the original stream's notion of the last sequence number, even when events are filtered out.
    return DomainEventStream.of(filtered, fullStream::getLastSequenceNumber);
}
@Override
public DomainEventStream readEvents(String aggregateIdentifier, long firstSequenceNumber) {
    // Events already persisted by the storage engine, starting at the requested sequence number.
    DomainEventStream stored = storageEngine.readEvents(aggregateIdentifier, firstSequenceNumber);
    // Events staged in the current Unit of Work, restricted to the same sequence-number range.
    DomainEventStream staged = DomainEventStream.of(
            stagedDomainEventMessages(aggregateIdentifier)
                    .filter(message -> message.getSequenceNumber() >= firstSequenceNumber));
    return DomainEventStream.concat(stored, staged);
}
/**
 * Upcasts and deserializes a single raw event entry into a DomainEventMessage.
 * Returns {@code null} when the upcaster chain drops the event entirely.
 */
private DomainEventMessage<?> upcastAndDeserializeDomainEvent(DomainEventData<?> domainEventData) {
    // Snapshots may be serialized with a dedicated serializer; pick the matching one.
    GrpcMetaDataAwareSerializer serializer = new GrpcMetaDataAwareSerializer(
            isSnapshot(domainEventData) ? getSnapshotSerializer() : getEventSerializer());
    DomainEventStream upcasted = EventStreamUtils.upcastAndDeserializeDomainEvents(
            Stream.of(domainEventData), serializer, upcasterChain);
    return upcasted.hasNext() ? upcasted.next() : null;
}
/**
 * Perform the actual loading of an aggregate. The necessary locks have been obtained.
 *
 * @param aggregateIdentifier the identifier of the aggregate to load
 * @param expectedVersion The expected version of the loaded aggregate
 * @return the fully initialized aggregate
 *
 * @throws AggregateDeletedException in case an aggregate existed in the past, but has been deleted
 * @throws AggregateNotFoundException when an aggregate with the given identifier does not exist
 */
@Override
protected EventSourcedAggregate<T> doLoadWithLock(String aggregateIdentifier, Long expectedVersion) {
    DomainEventStream events = readEvents(aggregateIdentifier);
    SnapshotTrigger snapshotTrigger = snapshotTriggerDefinition.prepareTrigger(aggregateFactory.getAggregateType());
    if (!events.hasNext()) {
        throw new AggregateNotFoundException(aggregateIdentifier,
                                             "The aggregate was not found in the event store");
    }
    // The first event (peeked, not consumed) seeds the aggregate root; the full stream is then replayed.
    T root = aggregateFactory.createAggregateRoot(aggregateIdentifier, events.peek());
    EventSourcedAggregate<T> loadedAggregate = EventSourcedAggregate.initialize(
            root, aggregateModel(), eventStore, repositoryProvider, snapshotTrigger);
    loadedAggregate.initializeState(events);
    if (loadedAggregate.isDeleted()) {
        throw new AggregateDeletedException(aggregateIdentifier);
    }
    return loadedAggregate;
}
/**
 * Initialize the state of this Event Sourced Aggregate with the events from the given {@code eventStream}.
 *
 * @param eventStream The Event Stream containing the events to be used to reconstruct this Aggregate's state.
 */
public void initializeState(DomainEventStream eventStream) {
    execute(root -> {
        initializing = true;
        try {
            // Replay every remaining historic event through the regular publication path.
            eventStream.forEachRemaining(this::publish);
            // Continue numbering new events after the last replayed one.
            initSequence(eventStream.getLastSequenceNumber());
        } finally {
            initializing = false;
            snapshotTrigger.initializationFinished();
        }
    });
}
/**
 * Returns the sequence number to continue with after the historic stream:
 * 0 when no historic events exist, otherwise the last historic sequence number plus one.
 */
private long nextSequenceNumber() {
    Long last = historic.getLastSequenceNumber();
    if (last == null) {
        return 0;
    }
    return last + 1;
}
DomainEventStream eventStream = eventStore.readEvents(aggregateIdentifier); SnapshotTrigger trigger = snapshotTriggerDefinition.prepareTrigger(aggregateFactory.getAggregateType()); if (!eventStream.hasNext()) { throw new AggregateNotFoundException(aggregateIdentifier, "The aggregate was not found in the event store"); aggregateIdentifier, eventStream.peek()), model, eventStore, repositoryProvider, trigger );
@Override
public DomainEventMessage<?> next() {
    // hasNext() also skips filtered-out events, so the delegate's head matches the filter here.
    if (!hasNext()) {
        // NOTE(review): returns null rather than throwing when exhausted; callers appear to rely on this.
        return null;
    }
    DomainEventMessage<?> event = delegate.next();
    lastSequenceNumber = event.getSequenceNumber();
    return event;
}
/**
 * {@inheritDoc}
 * <p>
 * This implementation returns a {@link DomainEventStream} starting with the last stored snapshot of the
 * aggregate followed by subsequent domain events, with any events staged in the current Unit of Work
 * appended at the end.
 */
@Override
public DomainEventStream readEvents(String aggregateIdentifier) {
    Optional<DomainEventMessage<?>> snapshot;
    try {
        snapshot = storageEngine.readSnapshot(aggregateIdentifier);
    } catch (Exception | LinkageError e) {
        // Snapshot reading problems must not prevent loading; let the error handler decide the fallback.
        snapshot = handleSnapshotReadingError(aggregateIdentifier, e);
    }
    // With a snapshot: start from it and append events with higher sequence numbers.
    // Without one: read the aggregate's full event history.
    DomainEventStream stored = snapshot
            .map(s -> DomainEventStream.concat(
                    DomainEventStream.of(s),
                    storageEngine.readEvents(aggregateIdentifier, s.getSequenceNumber() + 1)))
            .orElseGet(() -> storageEngine.readEvents(aggregateIdentifier));
    return DomainEventStream.concat(stored,
                                    DomainEventStream.of(stagedDomainEventMessages(aggregateIdentifier)));
}
/**
 * Opens a stream containing every domain event of the given {@code aggregateIdentifier} whose sequence
 * number is at least {@code firstSequenceNumber}.
 * <p>
 * The returned stream is <em>finite</em>, ending with the last known event of the aggregate. If the
 * event store holds no events of the given aggregate an empty stream is returned.
 * <p>
 * This default implementation delegates to {@link #readEvents(String)} and drops events with a lower
 * sequence number.
 *
 * @param aggregateIdentifier the identifier of the aggregate whose events to fetch
 * @param firstSequenceNumber the expected sequence number of the first event in the returned stream
 * @return a stream of all currently stored events of the aggregate
 */
default DomainEventStream readEvents(String aggregateIdentifier, long firstSequenceNumber) {
    DomainEventStream completeStream = readEvents(aggregateIdentifier);
    // Filter on sequence number but preserve the complete stream's last-sequence-number supplier.
    return DomainEventStream.of(
            completeStream.asStream()
                          .filter(event -> event.getSequenceNumber() >= firstSequenceNumber),
            completeStream::getLastSequenceNumber);
}
/**
 * Lazily opens the live event stream once the historic stream is exhausted.
 */
private void initActiveIfRequired() {
    if (actual != null) {
        // Live stream already opened.
        return;
    }
    if (historic.hasNext()) {
        // Still replaying historic events; nothing to do yet.
        return;
    }
    actual = domainEventStream.apply(aggregateIdentifier, nextSequenceNumber());
}
@Override
public DomainEventMessage<?> peek() {
    // hasNext() advances past filtered-out events, so the delegate's head is valid when it returns true.
    return hasNext() ? delegate.peek() : null;
}
/**
 * Initialize the state of this Event Sourced Aggregate with the events from the given {@code eventStream}.
 *
 * @param eventStream The Event Stream containing the events to be used to reconstruct this Aggregate's state.
 */
public void initializeState(DomainEventStream eventStream) {
    execute(aggregateRoot -> {
        this.initializing = true;
        try {
            // Apply each remaining event via the normal publish mechanism to rebuild state.
            eventStream.forEachRemaining(this::publish);
            // Seed the sequence counter so new events follow the replayed history.
            initSequence(eventStream.getLastSequenceNumber());
        } finally {
            this.initializing = false;
            snapshotTrigger.initializationFinished();
        }
    });
}