/**
 * Builds the at-most-once pipeline: records received from the source topic
 * (offsets committed before delivery, per {@code receiveAtmostOnce} semantics)
 * are transformed into outbound producer records and forwarded through the
 * sender. The sender is closed if the returned flux is cancelled.
 */
public Flux<?> flux() {
    KafkaSender<Integer, Person> sender = sender(senderOptions());
    Flux<ConsumerRecord<Integer, Person>> inbound =
            KafkaReceiver.create(receiverOptions(Collections.singleton(sourceTopic)))
                         .receiveAtmostOnce();
    return inbound
            .map(record -> SenderRecord.create(transform(record.value()), record.offset()))
            .as(sender::send)
            .doOnCancel(this::close);
}

public ProducerRecord<Integer, Person> transform(Person p) {
@Test public void atmostOnce() throws Exception { receiverOptions.closeTimeout(Duration.ofMillis(1000)); KafkaReceiver<Integer, String> receiver = createReceiver(); Flux<? extends ConsumerRecord<Integer, String>> kafkaFlux = receiver.receiveAtmostOnce(); sendReceive(kafkaFlux, 0, 10, 0, 10); // Second consumer should receive only new messages even though first one was not closed gracefully restartAndCheck(receiver, 10, 10, 0); }
private void restartAndCheck(KafkaReceiver<Integer, String> receiver, int sendStartIndex, int sendCount, int maxRedelivered) throws Exception { Thread.sleep(500); // Give a little time for commits to complete before terminating abruptly new TestableReceiver(receiver).terminate(); cancelSubscriptions(true); clearReceivedMessages(); Flux<? extends ConsumerRecord<Integer, String>> kafkaFlux2 = createReceiver().receiveAtmostOnce(); sendReceiveWithRedelivery(kafkaFlux2, sendStartIndex, sendCount, 0, maxRedelivered); clearReceivedMessages(); cancelSubscriptions(false); }