/**
 * Starts consuming change events, resuming from the last binlog offset
 * recorded in the offset store (or from the client's default position
 * when no offset has been saved yet).
 *
 * @param eventConsumer callback invoked for each captured event
 */
public void start(Consumer<EVENT> eventConsumer) {
  // Delegate straight to process(); the saved offset (possibly absent) tells it where to resume.
  process(eventConsumer, offsetStore.getLastBinlogFileOffset());
}
/**
 * Starts the db-log client from the last saved binlog offset, filtering out
 * events that were already published before the previous shutdown.
 *
 * <p>On restart the client may replay entries at or before the saved offset;
 * those duplicates are dropped until the first genuinely new event arrives,
 * after which filtering is disabled for the remainder of the run.
 *
 * @param eventConsumer callback invoked for each non-duplicate event
 */
public void start(Consumer<EVENT> eventConsumer) {
  Optional<BinlogFileOffset> resumeOffset = offsetStore.getLastBinlogFileOffset();
  dbLogClient.start(resumeOffset, new Consumer<EVENT>() {
    // True while events may still be replays of already-published entries.
    private boolean skippingReplayedEntries = true;

    @Override
    public void accept(EVENT event) {
      if (skippingReplayedEntries
          && resumeOffset.map(saved -> saved.isSameOrAfter(event.getBinlogFileOffset())).orElse(false)) {
        return; // duplicate of an event published before the restart
      }
      // First event past the saved offset: stop checking from here on.
      skippingReplayedEntries = false;
      eventConsumer.accept(event);
    }
  });
}
/**
 * Builds the db-log based CDC publisher for {@code PublishedEvent}s.
 *
 * @param dataProducerFactory creates the Kafka producer used for publishing
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param offsetStore persists the binlog position of published events
 * @param publishingStrategy maps events to Kafka topics/keys/payloads
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for duplicate detection
 * @return the configured publisher bean
 */
@Bean
public CdcDataPublisher<PublishedEvent> cdcKafkaPublisher(DataProducerFactory dataProducerFactory,
                                                          EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                          OffsetStore offsetStore,
                                                          PublishingStrategy<PublishedEvent> publishingStrategy,
                                                          EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties) {
  // Skips events that were already written to Kafka before a restart.
  DuplicatePublishingDetector duplicateDetector = new DuplicatePublishingDetector(
      eventuateKafkaConfigurationProperties.getBootstrapServers(),
      eventuateKafkaConsumerConfigurationProperties);
  return new DbLogBasedCdcDataPublisher<>(dataProducerFactory, offsetStore, duplicateDetector, publishingStrategy);
}
// Callback fired after an event has been delivered to the message broker:
// record its binlog offset so that, on restart, processing resumes after
// this event instead of republishing it.
@Override public void onEventSent(PublishedEvent publishedEvent) { offsetStore.save(publishedEvent.getBinlogFileOffset()); } }
/**
 * Creates a Kafka-backed offset store for the given topic and key.
 *
 * @param topicName Kafka topic used to persist offsets
 * @param key record key under which offsets are written
 * @return a new {@code DatabaseOffsetKafkaStore} using this component's
 *         producer and Kafka settings, with default (empty) consumer properties
 */
public DatabaseOffsetKafkaStore getDatabaseOffsetKafkaStore(String topicName, String key) {
  return new DatabaseOffsetKafkaStore(
      topicName,
      key,
      eventuateKafkaProducer,
      eventuateKafkaConfigurationProperties,
      EventuateKafkaConsumerConfigurationProperties.empty());
}
/**
 * Builds the CDC processor that reads the database log and tracks its
 * position through the supplied offset store.
 *
 * @param dbLogClient source of change events from the database log
 * @param offsetStore provides the resume position for the processor
 * @return the configured processor bean
 */
@Bean
public CdcProcessor<PublishedEvent> cdcProcessor(DbLogClient<PublishedEvent> dbLogClient, OffsetStore offsetStore) {
  CdcProcessor<PublishedEvent> processor = new DbLogBasedCdcProcessor<>(dbLogClient, offsetStore);
  return processor;
}
}
/**
 * Starts the db-log client at the given offset, dropping replayed events.
 *
 * <p>After a restart the client may re-deliver entries at or before
 * {@code startingBinlogFileOffset}; those are discarded until the first
 * event strictly past the saved offset, after which all events flow
 * through to {@code eventConsumer}.
 *
 * @param eventConsumer callback invoked for each non-duplicate event
 * @param startingBinlogFileOffset position to resume from, if previously saved
 * @throws RuntimeException wrapping any failure raised while starting the client
 */
protected void process(Consumer<EVENT> eventConsumer, Optional<BinlogFileOffset> startingBinlogFileOffset) {
  try {
    dbLogClient.start(startingBinlogFileOffset, new Consumer<EVENT>() {
      // Remains true only while replayed (already-published) entries may still arrive.
      private boolean replayStillPossible = true;

      @Override
      public void accept(EVENT event) {
        boolean isReplay = replayStillPossible
            && startingBinlogFileOffset
                .map(saved -> saved.isSameOrAfter(event.getBinlogFileOffset()))
                .orElse(false);
        if (isReplay) {
          return; // already published before the restart
        }
        replayStillPossible = false;
        eventConsumer.accept(event);
      }
    });
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * Stops the component: flushes any record still awaiting persistence,
 * then shuts down the periodic executor. Synchronized so the final flush
 * does not race with a concurrent scheduled save.
 */
public synchronized void stop() {
  // Persist the pending record (if any) before the executor goes away.
  this.recordToSave.ifPresent(this::store);
  this.scheduledExecutorService.shutdown();
}
// Callback fired after an event has been sent: persist its binlog offset.
// NOTE(review): this builds a brand-new MySqlBinaryLogClient and
// DatabaseOffsetKafkaStore on EVERY event just to call save(). That looks
// expensive (and possibly leaks producer/consumer resources) — confirm
// whether the store can be created once and cached in a field instead.
@Override public void onEventSent(PublishedEvent publishedEvent) { createDatabaseOffsetKafkaStore(createMySqlBinaryLogClient()).save(publishedEvent.getBinlogFileOffset()); }
/**
 * Builds the db-log based CDC publisher for {@code MessageWithDestination}
 * payloads. Active in every profile except {@code EventuatePolling}.
 *
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param databaseOffsetKafkaStore persists binlog offsets of published messages
 * @param publishingStrategy maps messages to Kafka topics/keys/payloads
 * @param eventuateKafkaProducerConfigurationProperties producer tuning settings
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for duplicate detection
 * @return the configured publisher bean
 */
@Bean
@Profile("!EventuatePolling")
public CdcDataPublisher<MessageWithDestination> cdcKafkaPublisher(EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                                  DatabaseOffsetKafkaStore databaseOffsetKafkaStore,
                                                                  PublishingStrategy<MessageWithDestination> publishingStrategy,
                                                                  EventuateKafkaProducerConfigurationProperties eventuateKafkaProducerConfigurationProperties,
                                                                  EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties) {
  // Same broker list feeds both the producer factory and the duplicate detector.
  String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();
  return new DbLogBasedCdcDataPublisher<MessageWithDestination>(
      () -> new EventuateKafkaProducer(bootstrapServers, eventuateKafkaProducerConfigurationProperties),
      databaseOffsetKafkaStore,
      new DuplicatePublishingDetector(bootstrapServers, eventuateKafkaConsumerConfigurationProperties),
      publishingStrategy);
}
// Post-send hook: save the event's binlog offset so a restart resumes
// after this event rather than republishing it.
@Override public void onEventSent(PublishedEvent publishedEvent) { offsetStore.save(publishedEvent.getBinlogFileOffset()); } }
/**
 * Builds the CDC processor, using the Kafka-backed offset store to
 * determine where in the database log to resume.
 *
 * @param dbLogClient source of change events from the database log
 * @param databaseOffsetKafkaStore Kafka-backed store of binlog positions
 * @return the configured processor bean
 */
@Bean
public CdcProcessor<PublishedEvent> cdcProcessor(DbLogClient<PublishedEvent> dbLogClient,
                                                 DatabaseOffsetKafkaStore databaseOffsetKafkaStore) {
  return new DbLogBasedCdcProcessor<>(dbLogClient, databaseOffsetKafkaStore);
}
}
/**
 * Builds the db-log based CDC publisher for {@code MessageWithDestination}
 * payloads; disabled under the {@code EventuatePolling} profile.
 *
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param databaseOffsetKafkaStore persists binlog offsets of published messages
 * @param publishingStrategy maps messages to Kafka topics/keys/payloads
 * @param eventuateKafkaProducerConfigurationProperties producer tuning settings
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for duplicate detection
 * @return the configured publisher bean
 */
@Bean
@Profile("!EventuatePolling")
public CdcDataPublisher<MessageWithDestination> cdcKafkaPublisher(EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                                  DatabaseOffsetKafkaStore databaseOffsetKafkaStore,
                                                                  PublishingStrategy<MessageWithDestination> publishingStrategy,
                                                                  EventuateKafkaProducerConfigurationProperties eventuateKafkaProducerConfigurationProperties,
                                                                  EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties) {
  return new DbLogBasedCdcDataPublisher<MessageWithDestination>(
      // Producer is created lazily by the publisher via this factory.
      () -> new EventuateKafkaProducer(eventuateKafkaConfigurationProperties.getBootstrapServers(),
          eventuateKafkaProducerConfigurationProperties),
      databaseOffsetKafkaStore,
      new DuplicatePublishingDetector(eventuateKafkaConfigurationProperties.getBootstrapServers(),
          eventuateKafkaConsumerConfigurationProperties),
      publishingStrategy);
}
// Invoked once an event is confirmed sent: persist its binlog offset so
// it is not republished after a restart.
@Override public void onEventSent(PublishedEvent publishedEvent) { offsetStore.save(publishedEvent.getBinlogFileOffset()); } }
/**
 * Builds the db-log based CDC publisher used under the {@code PostgresWal}
 * profile.
 *
 * @param dataProducerFactory creates the Kafka producer used for publishing
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for duplicate detection
 * @param offsetStore persists the log position of published events
 * @param publishingStrategy maps events to Kafka topics/keys/payloads
 * @return the configured publisher bean
 */
@Bean
@Profile("PostgresWal")
public DbLogBasedCdcDataPublisher<PublishedEvent> dbLogBasedCdcKafkaPublisher(DataProducerFactory dataProducerFactory,
                                                                              EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                                              EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties,
                                                                              OffsetStore offsetStore,
                                                                              PublishingStrategy<PublishedEvent> publishingStrategy) {
  // Filters out events already present in Kafka from before a restart.
  DuplicatePublishingDetector duplicateDetector = new DuplicatePublishingDetector(
      eventuateKafkaConfigurationProperties.getBootstrapServers(),
      eventuateKafkaConsumerConfigurationProperties);
  return new DbLogBasedCdcDataPublisher<>(dataProducerFactory, offsetStore, duplicateDetector, publishingStrategy);
}
/**
 * Builds the db-log based CDC publisher when MySQL binlog capture is
 * active (see {@code MySqlBinlogCondition}).
 *
 * @param dataProducerFactory creates the Kafka producer used for publishing
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for duplicate detection
 * @param offsetStore persists the binlog position of published events
 * @param publishingStrategy maps events to Kafka topics/keys/payloads
 * @return the configured publisher bean
 */
@Bean
@Conditional(MySqlBinlogCondition.class)
public CdcDataPublisher<PublishedEvent> cdcKafkaPublisher(DataProducerFactory dataProducerFactory,
                                                          EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                          EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties,
                                                          OffsetStore offsetStore,
                                                          PublishingStrategy<PublishedEvent> publishingStrategy) {
  DuplicatePublishingDetector duplicateDetector = new DuplicatePublishingDetector(
      eventuateKafkaConfigurationProperties.getBootstrapServers(),
      eventuateKafkaConsumerConfigurationProperties);
  return new DbLogBasedCdcDataPublisher<>(dataProducerFactory, offsetStore, duplicateDetector, publishingStrategy);
}
}
/**
 * Creates the CDC publisher used by this test/harness class: a db-log
 * based publisher wired with a lazily-constructed Kafka producer,
 * the shared offset store, and a duplicate-publishing detector using
 * default (empty) producer/consumer properties.
 *
 * @return a freshly constructed publisher
 */
@Override
protected CdcDataPublisher<PublishedEvent> createCdcKafkaPublisher() {
  // Both the producer factory and the detector point at the same brokers.
  String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();
  return new DbLogBasedCdcDataPublisher<>(
      () -> new EventuateKafkaProducer(bootstrapServers, EventuateKafkaProducerConfigurationProperties.empty()),
      offsetStore,
      new DuplicatePublishingDetector(bootstrapServers, EventuateKafkaConsumerConfigurationProperties.empty()),
      publishingStrategy);
}
}