/**
 * Wires the CDC data publisher bean for {@code PublishedEvent} records.
 *
 * <p>The publisher pushes change events through the supplied {@code DataProducerFactory},
 * tracks progress via {@code offsetStore}, and consults a {@code DuplicatePublishingDetector}
 * (backed by the configured Kafka bootstrap servers) to skip already-published binlog entries.
 *
 * @param dataProducerFactory factory for the Kafka data producer
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param offsetStore persistence for the last published binlog offset
 * @param publishingStrategy maps a {@code PublishedEvent} to topic/key/payload
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for the duplicate detector
 * @return the configured {@code CdcDataPublisher}
 */
@Bean
public CdcDataPublisher<PublishedEvent> cdcKafkaPublisher(DataProducerFactory dataProducerFactory,
                                                          EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                          OffsetStore offsetStore,
                                                          PublishingStrategy<PublishedEvent> publishingStrategy,
                                                          EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties) {
  // Detector shares the same bootstrap servers as the producer side.
  DuplicatePublishingDetector duplicateDetector =
          new DuplicatePublishingDetector(eventuateKafkaConfigurationProperties.getBootstrapServers(),
                  eventuateKafkaConsumerConfigurationProperties);

  return new DbLogBasedCdcDataPublisher<>(dataProducerFactory,
          offsetStore,
          duplicateDetector,
          publishingStrategy);
}
/**
 * Wires the CDC data publisher bean for {@code MessageWithDestination} records.
 *
 * <p>Active only when the {@code EventuatePolling} profile is NOT enabled (i.e. the
 * db-log-based pipeline is in use). The Kafka producer is created lazily via a supplier
 * from the configured bootstrap servers and producer properties; duplicate publishing is
 * detected against the same bootstrap servers.
 *
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param databaseOffsetKafkaStore Kafka-backed store for the last published binlog offset
 * @param publishingStrategy maps a {@code MessageWithDestination} to topic/key/payload
 * @param eventuateKafkaProducerConfigurationProperties producer tuning properties
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for the duplicate detector
 * @return the configured {@code CdcDataPublisher}
 */
@Bean
@Profile("!EventuatePolling")
public CdcDataPublisher<MessageWithDestination> cdcKafkaPublisher(EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                                  DatabaseOffsetKafkaStore databaseOffsetKafkaStore,
                                                                  PublishingStrategy<MessageWithDestination> publishingStrategy,
                                                                  EventuateKafkaProducerConfigurationProperties eventuateKafkaProducerConfigurationProperties,
                                                                  EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties) {
  String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();

  // Producer is constructed on demand rather than eagerly at bean-creation time.
  DataProducerFactory producerFactory =
          () -> new EventuateKafkaProducer(bootstrapServers, eventuateKafkaProducerConfigurationProperties);

  DuplicatePublishingDetector duplicateDetector =
          new DuplicatePublishingDetector(bootstrapServers, eventuateKafkaConsumerConfigurationProperties);

  return new DbLogBasedCdcDataPublisher<MessageWithDestination>(producerFactory,
          databaseOffsetKafkaStore,
          duplicateDetector,
          publishingStrategy);
}
/**
 * Wires the CDC data publisher bean for {@code MessageWithDestination} records.
 *
 * <p>Registered only outside the {@code EventuatePolling} profile, i.e. when change data
 * capture is driven by the database log. Builds a lazily-instantiated Kafka producer and a
 * duplicate-publishing detector, both pointed at the configured bootstrap servers.
 *
 * <p>NOTE(review): this method is byte-identical to another {@code cdcKafkaPublisher}
 * definition in this chunk — presumably they live in different configuration classes;
 * confirm and consider extracting a shared base configuration.
 *
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param databaseOffsetKafkaStore Kafka-backed store for the last published binlog offset
 * @param publishingStrategy maps a {@code MessageWithDestination} to topic/key/payload
 * @param eventuateKafkaProducerConfigurationProperties producer tuning properties
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for the duplicate detector
 * @return the configured {@code CdcDataPublisher}
 */
@Bean
@Profile("!EventuatePolling")
public CdcDataPublisher<MessageWithDestination> cdcKafkaPublisher(EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                                  DatabaseOffsetKafkaStore databaseOffsetKafkaStore,
                                                                  PublishingStrategy<MessageWithDestination> publishingStrategy,
                                                                  EventuateKafkaProducerConfigurationProperties eventuateKafkaProducerConfigurationProperties,
                                                                  EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties) {
  final String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();

  return new DbLogBasedCdcDataPublisher<MessageWithDestination>(
          () -> new EventuateKafkaProducer(bootstrapServers, eventuateKafkaProducerConfigurationProperties),
          databaseOffsetKafkaStore,
          new DuplicatePublishingDetector(bootstrapServers, eventuateKafkaConsumerConfigurationProperties),
          publishingStrategy);
}
/**
 * Wires the db-log-based CDC publisher bean for the {@code PostgresWal} profile.
 *
 * <p>Publishes {@code PublishedEvent}s captured from the Postgres write-ahead log through
 * the supplied producer factory, persisting progress in {@code offsetStore} and skipping
 * entries the {@code DuplicatePublishingDetector} reports as already published.
 *
 * @param dataProducerFactory factory for the Kafka data producer
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for the duplicate detector
 * @param offsetStore persistence for the last published binlog offset
 * @param publishingStrategy maps a {@code PublishedEvent} to topic/key/payload
 * @return the configured {@code DbLogBasedCdcDataPublisher}
 */
@Bean
@Profile("PostgresWal")
public DbLogBasedCdcDataPublisher<PublishedEvent> dbLogBasedCdcKafkaPublisher(DataProducerFactory dataProducerFactory,
                                                                              EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                                              EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties,
                                                                              OffsetStore offsetStore,
                                                                              PublishingStrategy<PublishedEvent> publishingStrategy) {
  DuplicatePublishingDetector duplicateDetector =
          new DuplicatePublishingDetector(eventuateKafkaConfigurationProperties.getBootstrapServers(),
                  eventuateKafkaConsumerConfigurationProperties);

  return new DbLogBasedCdcDataPublisher<>(dataProducerFactory,
          offsetStore,
          duplicateDetector,
          publishingStrategy);
}
/**
 * Wires the CDC data publisher bean when the MySQL binlog condition matches.
 *
 * <p>Identical wiring to the other db-log-based publishers: events flow through the
 * {@code DataProducerFactory}, offsets are persisted via {@code offsetStore}, and the
 * {@code DuplicatePublishingDetector} guards against re-publishing binlog entries.
 *
 * @param dataProducerFactory factory for the Kafka data producer
 * @param eventuateKafkaConfigurationProperties supplies the Kafka bootstrap servers
 * @param eventuateKafkaConsumerConfigurationProperties consumer settings for the duplicate detector
 * @param offsetStore persistence for the last published binlog offset
 * @param publishingStrategy maps a {@code PublishedEvent} to topic/key/payload
 * @return the configured {@code CdcDataPublisher}
 */
@Bean
@Conditional(MySqlBinlogCondition.class)
public CdcDataPublisher<PublishedEvent> cdcKafkaPublisher(DataProducerFactory dataProducerFactory,
                                                          EventuateKafkaConfigurationProperties eventuateKafkaConfigurationProperties,
                                                          EventuateKafkaConsumerConfigurationProperties eventuateKafkaConsumerConfigurationProperties,
                                                          OffsetStore offsetStore,
                                                          PublishingStrategy<PublishedEvent> publishingStrategy) {
  return new DbLogBasedCdcDataPublisher<>(
          dataProducerFactory,
          offsetStore,
          new DuplicatePublishingDetector(eventuateKafkaConfigurationProperties.getBootstrapServers(),
                  eventuateKafkaConsumerConfigurationProperties),
          publishingStrategy);
}
}
/**
 * Creates a db-log-based CDC publisher using this class's configuration fields.
 *
 * <p>Builds the Kafka producer lazily with empty (default) producer properties and a
 * duplicate-publishing detector with empty (default) consumer properties, both pointed at
 * the bootstrap servers from {@code eventuateKafkaConfigurationProperties}. Reads the
 * {@code offsetStore} and {@code publishingStrategy} instance fields — presumably
 * initialized by the enclosing class, which is not visible in this chunk.
 *
 * @return a freshly constructed {@code DbLogBasedCdcDataPublisher}
 */
@Override
protected CdcDataPublisher<PublishedEvent> createCdcKafkaPublisher() {
  final String bootstrapServers = eventuateKafkaConfigurationProperties.getBootstrapServers();

  DataProducerFactory producerFactory =
          () -> new EventuateKafkaProducer(bootstrapServers, EventuateKafkaProducerConfigurationProperties.empty());

  DuplicatePublishingDetector duplicateDetector =
          new DuplicatePublishingDetector(bootstrapServers, EventuateKafkaConsumerConfigurationProperties.empty());

  return new DbLogBasedCdcDataPublisher<>(producerFactory,
          offsetStore,
          duplicateDetector,
          publishingStrategy);
}
}
/**
 * A binlog offset must be considered publishable when its topic has no prior messages.
 */
@Test
public void emptyTopicTest() {
  DuplicatePublishingDetector detector =
          new DuplicatePublishingDetector(eventuateKafkaConfigurationProperties.getBootstrapServers(),
                  EventuateKafkaConsumerConfigurationProperties.empty());

  // A fresh, never-used topic has no published offsets, so anything should be publishable.
  BinlogFileOffset offset = generateBinlogFileOffset();
  String freshTopic = generateUniqueTopicName();

  assertTrue(detector.shouldBePublished(offset, freshTopic));
}
/**
 * The duplicate check must look past a trailing old-binlog-file entry: after flooding the
 * topic with offsets from the current binlog file and then appending an entry from an OLD
 * binlog file, an offset beyond the flooded range must still be publishable.
 *
 * <p>NOTE(review): method name misspells "Entries" ("Entires"); kept as-is in case CI
 * filters or reports reference the name — rename in a dedicated commit.
 */
@Test
public void shouldHandlePublishCheckForOldEntires() {
  String topicName = generateUniqueTopicName();
  String binlogFilename = "binlog.file." + System.currentTimeMillis();

  DuplicatePublishingDetector duplicatePublishingDetector =
          new DuplicatePublishingDetector(eventuateKafkaConfigurationProperties.getBootstrapServers(),
                  EventuateKafkaConsumerConfigurationProperties.empty());

  // try-with-resources: the original leaked the producer if flooding/sending threw.
  try (Producer<String, String> producer = createProducer(eventuateKafkaConfigurationProperties.getBootstrapServers())) {
    floodTopic(producer, binlogFilename, topicName);
    sendOldPublishedEvent(producer, topicName);
  }

  // Offset 10 is beyond the flooded range, so it must still be publishable despite the
  // old-file entry at the tail of the topic.
  assertTrue(duplicatePublishingDetector.shouldBePublished(new BinlogFileOffset(binlogFilename, 10L), topicName));
}
/**
 * After flooding a topic with offsets from a binlog file, an offset inside the flooded
 * range (1) must be reported as a duplicate, while an offset beyond it (10) must be
 * reported as publishable.
 */
@Test
public void shouldBePublishedTest() {
  String topicName = generateUniqueTopicName();
  String binlogFilename = "binlog.file." + System.currentTimeMillis();

  DuplicatePublishingDetector duplicatePublishingDetector =
          new DuplicatePublishingDetector(eventuateKafkaConfigurationProperties.getBootstrapServers(),
                  EventuateKafkaConsumerConfigurationProperties.empty());

  // try-with-resources: the original leaked the producer if floodTopic threw.
  try (Producer<String, String> producer = createProducer(eventuateKafkaConfigurationProperties.getBootstrapServers())) {
    floodTopic(producer, binlogFilename, topicName);
  }

  // Offset 1 was already covered by the flood -> duplicate; offset 10 is new -> publish.
  assertFalse(duplicatePublishingDetector.shouldBePublished(new BinlogFileOffset(binlogFilename, 1L), topicName));
  assertTrue(duplicatePublishingDetector.shouldBePublished(new BinlogFileOffset(binlogFilename, 10L), topicName));
}