/**
 * Returns the headers attached to this consumer record, wrapping each
 * delegate header in its RxJava-flavoured counterpart.
 *
 * @return the list of consumer record headers
 */
public List<io.vertx.rxjava.kafka.client.producer.KafkaHeader> headers() {
  return delegate.headers().stream()
    .map(io.vertx.rxjava.kafka.client.producer.KafkaHeader::newInstance)
    .collect(java.util.stream.Collectors.toList());
}
/**
 * Returns the headers attached to this consumer record, wrapping each
 * delegate header in its RxJava-flavoured counterpart.
 *
 * @return the list of consumer record headers
 */
public List<io.vertx.rxjava.kafka.client.producer.KafkaHeader> headers() {
  return delegate.headers().stream()
    .map(io.vertx.rxjava.kafka.client.producer.KafkaHeader::newInstance)
    .collect(java.util.stream.Collectors.toList());
}
/**
 * Verifies that headers produced with each record reach the consumer: every
 * received record must carry exactly one header whose key/value follow the
 * "header_key&lt;i&gt;" / "header_value&lt;i&gt;" sequence in send order. The async
 * completes once {@code numMessages} records have been asserted.
 *
 * @param ctx the test context used for assertions and async completion
 */
@Test
public void testConsumerWithHeader(TestContext ctx) {
  int numMessages = 1000;
  String topicName = "testConsumerWithHeader";
  Properties config = setupConsumeWithHeaders(ctx, numMessages, topicName);
  this.consumer = createConsumer(vertx, config);
  // Fix: local was named "consumer", shadowing the field assigned just above;
  // renamed so the field write and the wrapped local are clearly distinct.
  KafkaConsumer<String, String> wrappedConsumer = new KafkaConsumerImpl<>(this.consumer);
  Async done = ctx.async();
  AtomicInteger count = new AtomicInteger(numMessages);
  AtomicInteger headerIndex = new AtomicInteger();
  wrappedConsumer.exceptionHandler(ctx::fail);
  wrappedConsumer.handler(rec -> {
    List<KafkaHeader> headers = rec.headers();
    ctx.assertEquals(1, headers.size());
    KafkaHeader header = headers.get(0);
    // getAndIncrement advances the expected index once per received record;
    // assumes records arrive in send order (single partition) — confirmed by
    // the matching key/value assertions below.
    ctx.assertEquals("header_key" + headerIndex.get(), header.key());
    ctx.assertEquals("header_value" + headerIndex.getAndIncrement(), header.value().toString());
    if (count.decrementAndGet() == 0) {
      done.complete();
    }
  });
  wrappedConsumer.subscribe(Collections.singleton(topicName));
}
/**
 * Verifies that headers produced with each record reach the consumer: every
 * received record must carry exactly one header whose key/value follow the
 * "header_key&lt;i&gt;" / "header_value&lt;i&gt;" sequence in send order. The async
 * completes once {@code numMessages} records have been asserted.
 *
 * @param ctx the test context used for assertions and async completion
 */
@Test
public void testConsumerWithHeader(TestContext ctx) {
  int numMessages = 1000;
  String topicName = "testConsumerWithHeader";
  Properties config = setupConsumeWithHeaders(ctx, numMessages, topicName);
  this.consumer = createConsumer(vertx, config);
  // Fix: local was named "consumer", shadowing the field assigned just above;
  // renamed so the field write and the wrapped local are clearly distinct.
  KafkaConsumer<String, String> wrappedConsumer = new KafkaConsumerImpl<>(this.consumer);
  Async done = ctx.async();
  AtomicInteger count = new AtomicInteger(numMessages);
  AtomicInteger headerIndex = new AtomicInteger();
  wrappedConsumer.exceptionHandler(ctx::fail);
  wrappedConsumer.handler(rec -> {
    List<KafkaHeader> headers = rec.headers();
    ctx.assertEquals(1, headers.size());
    KafkaHeader header = headers.get(0);
    // getAndIncrement advances the expected index once per received record;
    // assumes records arrive in send order (single partition) — confirmed by
    // the matching key/value assertions below.
    ctx.assertEquals("header_key" + headerIndex.get(), header.key());
    ctx.assertEquals("header_value" + headerIndex.getAndIncrement(), header.value().toString());
    if (count.decrementAndGet() == 0) {
      done.complete();
    }
  });
  wrappedConsumer.subscribe(Collections.singleton(topicName));
}