/**
 * Returns the buffer key, delegating to the wrapped instance and memoizing
 * the result so the delegate is consulted at most once.
 * NOTE(review): not thread-safe — concurrent first calls may both hit the
 * delegate; acceptable if the delegate call is idempotent (appears to be).
 *
 * @return the buffer key
 */
public String key() {
  if (cached_0 == null) {
    cached_0 = delegate.key();
  }
  return cached_0;
}
/**
 * Returns the buffer key, delegating to the wrapped instance and memoizing
 * the result so the delegate is consulted at most once.
 * NOTE(review): not thread-safe — concurrent first calls may both hit the
 * delegate; acceptable if the delegate call is idempotent (appears to be).
 *
 * @return the buffer key
 */
public String key() {
  if (cached_0 == null) {
    cached_0 = delegate.key();
  }
  return cached_0;
}
/**
 * Materializes this record as a Kafka {@code ProducerRecord}, translating any
 * attached headers into {@code RecordHeader} instances.
 * NOTE(review): assumes {@code header.value().getBytes()} yields the intended
 * raw bytes (looks like a Vert.x Buffer, not a String) — confirm upstream type.
 *
 * @return the underlying Kafka producer record
 */
@Override
public ProducerRecord record() {
  if (!headers.isEmpty()) {
    // Convert each wrapped header to the Kafka-native representation.
    return new ProducerRecord<>(
      topic,
      partition,
      timestamp,
      key,
      value,
      headers.stream()
        .map(h -> new RecordHeader(h.key(), h.value().getBytes()))
        .collect(Collectors.toList()));
  }
  // No headers: use the shorter constructor overload.
  return new ProducerRecord<>(topic, partition, timestamp, key, value);
}
/**
 * Verifies that every consumed record carries exactly one header and that the
 * header key/value pairs arrive in producation order
 * ("header_key0"/"header_value0", "header_key1"/"header_value1", ...).
 */
@Test
public void testConsumerWithHeader(TestContext ctx) {
  int numMessages = 1000;
  String topicName = "testConsumerWithHeader";
  Properties config = setupConsumeWithHeaders(ctx, numMessages, topicName);
  consumer = createConsumer(vertx, config);
  // Renamed from "consumer" to avoid shadowing the field of the same name.
  KafkaConsumer<String, String> wrapped = new KafkaConsumerImpl<>(this.consumer);
  Async done = ctx.async();
  AtomicInteger remaining = new AtomicInteger(numMessages);
  AtomicInteger headerIndex = new AtomicInteger();
  wrapped.exceptionHandler(ctx::fail);
  wrapped.handler(rec -> {
    List<KafkaHeader> headers = rec.headers();
    ctx.assertEquals(1, headers.size());
    KafkaHeader header = headers.get(0);
    // Read the index once; key and value of a record share the same suffix.
    int idx = headerIndex.getAndIncrement();
    ctx.assertEquals("header_key" + idx, header.key());
    ctx.assertEquals("header_value" + idx, header.value().toString());
    if (remaining.decrementAndGet() == 0) {
      done.complete();
    }
  });
  wrapped.subscribe(Collections.singleton(topicName));
}
/**
 * Verifies that every consumed record carries exactly one header and that the
 * header key/value pairs arrive in production order
 * ("header_key0"/"header_value0", "header_key1"/"header_value1", ...).
 */
@Test
public void testConsumerWithHeader(TestContext ctx) {
  int numMessages = 1000;
  String topicName = "testConsumerWithHeader";
  Properties config = setupConsumeWithHeaders(ctx, numMessages, topicName);
  consumer = createConsumer(vertx, config);
  // Renamed from "consumer" to avoid shadowing the field of the same name.
  KafkaConsumer<String, String> wrapped = new KafkaConsumerImpl<>(this.consumer);
  Async done = ctx.async();
  AtomicInteger remaining = new AtomicInteger(numMessages);
  AtomicInteger headerIndex = new AtomicInteger();
  wrapped.exceptionHandler(ctx::fail);
  wrapped.handler(rec -> {
    List<KafkaHeader> headers = rec.headers();
    ctx.assertEquals(1, headers.size());
    KafkaHeader header = headers.get(0);
    // Read the index once; key and value of a record share the same suffix.
    int idx = headerIndex.getAndIncrement();
    ctx.assertEquals("header_key" + idx, header.key());
    ctx.assertEquals("header_value" + idx, header.value().toString());
    if (remaining.decrementAndGet() == 0) {
      done.complete();
    }
  });
  wrapped.subscribe(Collections.singleton(topicName));
}
/**
 * Ensures headers added via {@code addHeaders} are exposed by {@code headers()}
 * with keys and values preserved in insertion order.
 */
@Test
public void testRecordWithHeaders() {
  List<KafkaHeader> headers = Arrays.asList(
    KafkaHeader.header("key1", "value1"),
    KafkaHeader.header("key2", "value2")
  );
  List<KafkaHeader> recordHeaders =
    KafkaProducerRecord.create("mytopic", "mykey", "myvalue").addHeaders(headers).headers();
  assertNotNull(recordHeaders);
  assertEquals(2, recordHeaders.size());
  // Header N is expected to round-trip as ("keyN", "valueN"), 1-based.
  for (int i = 0; i < recordHeaders.size(); i++) {
    KafkaHeader kafkaHeader = recordHeaders.get(i);
    assertEquals("key" + (i + 1), kafkaHeader.key());
    assertEquals("value" + (i + 1), kafkaHeader.value().toString());
  }
}
/**
 * Ensures headers added via {@code addHeaders} are exposed by {@code headers()}
 * with keys and values preserved in insertion order.
 */
@Test
public void testRecordWithHeaders() {
  List<KafkaHeader> headers = Arrays.asList(
    KafkaHeader.header("key1", "value1"),
    KafkaHeader.header("key2", "value2")
  );
  List<KafkaHeader> recordHeaders =
    KafkaProducerRecord.create("mytopic", "mykey", "myvalue").addHeaders(headers).headers();
  assertNotNull(recordHeaders);
  assertEquals(2, recordHeaders.size());
  // Header N is expected to round-trip as ("keyN", "valueN"), 1-based.
  for (int i = 0; i < recordHeaders.size(); i++) {
    KafkaHeader kafkaHeader = recordHeaders.get(i);
    assertEquals("key" + (i + 1), kafkaHeader.key());
    assertEquals("value" + (i + 1), kafkaHeader.value().toString());
  }
}