/**
 * Asynchronously produce messages with monotonically increasing String keys and values obtained from the supplied
 * function, and write them to the cluster.
 *
 * @param topic the name of the topic to which the messages should be written; may not be null
 * @param messageCount the number of messages to produce; must be positive
 * @param completionCallback the function to be called when the producer is completed; may be null
 * @param valueSupplier the value supplier; may not be null
 */
public void produceStrings(String topic, int messageCount, Runnable completionCallback, Supplier<String> valueSupplier) {
  AtomicLong counter = new AtomicLong(0);
  produceStrings(messageCount, completionCallback, () -> {
    long i = counter.incrementAndGet();
    String keyAndValue = Long.toString(i);
    return new ProducerRecord<>(topic, keyAndValue, valueSupplier.get());
  });
}
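// Usage sketch for the overload above (illustrative, not from the source). It assumes the
// same kafkaCluster fixture used in the snippets below; the topic name, message count, and
// latch are hypothetical.
CountDownLatch done = new CountDownLatch(1);
kafkaCluster.useTo().produceStrings("example-topic", 100, done::countDown,
    () -> UUID.randomUUID().toString());
// The completion callback fires once, after all 100 records have been produced.
done.await(10, TimeUnit.SECONDS); // InterruptedException must be handled or declared by the caller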
private Properties setupConsumeWithHeaders(TestContext ctx, int numMessages, String topicName) {
  Async batch = ctx.async();
  AtomicInteger index = new AtomicInteger();
  kafkaCluster.useTo().produceStrings(numMessages, batch::complete, () ->
      new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.get(),
          Collections.singletonList(new RecordHeader("header_key" + index.get(),
              ("header_value" + index.getAndIncrement()).getBytes()))));
  batch.awaitSuccess(20000);
  Properties config = kafkaCluster.useTo().getConsumerProperties(topicName, topicName, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  return config;
}
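// Sketch of a test body that could use the helper above (hypothetical; it mirrors the
// consume tests below and assumes Vert.x's KafkaConsumerRecord#headers() accessor).
Properties config = setupConsumeWithHeaders(ctx, 10, "testHeaders");
consumer = createConsumer(vertx, config);
Async done = ctx.async();
AtomicInteger count = new AtomicInteger(10);
consumer.exceptionHandler(ctx::fail);
consumer.handler(rec -> {
  // The helper's producer attached exactly one header per record.
  ctx.assertEquals(1, rec.headers().size());
  if (count.decrementAndGet() == 0) {
    done.complete();
  }
});
consumer.subscribe(Collections.singleton("testHeaders"));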
AtomicInteger index = new AtomicInteger();
int numMessages = 1000;
kafkaCluster.useTo().produceStrings(numMessages, batch::complete, () ->
    new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
batch.awaitSuccess(20000);
AtomicInteger index = new AtomicInteger();
int numMessages = 10;
kafkaCluster.useTo().produceStrings(numMessages, batch::complete, () ->
    new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
batch.awaitSuccess(20000);
AtomicInteger index = new AtomicInteger();
int numMessages = 500;
kafkaCluster.useTo().produceStrings(numMessages, batch1::complete, () ->
    new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
batch1.awaitSuccess(10000);
int numMessages = 1;
// The latch expects numMessages + 2 completions; the producer contributes one countDown,
// and the remaining countdowns presumably happen later in the enclosing test (not shown).
Async finished = ctx.async(numMessages + 2);
kafkaCluster.useTo().produceStrings(numMessages, finished::countDown, () ->
    new ProducerRecord<>(topicName, 0, "key", "value"));
Context context = vertx.getOrCreateContext();
Async batch = ctx.async();
int numMessages = 1000;
kafkaCluster.useTo().produceStrings(numMessages, batch::complete, () ->
    new ProducerRecord<>(topicName, "value"));
// numMessages and topic are defined earlier in the enclosing test (not shown in this snippet).
Async batch1 = ctx.async();
AtomicInteger index = new AtomicInteger();
kafkaCluster.useTo().produceStrings(numMessages, batch1::complete, () ->
    new ProducerRecord<>(topic, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
batch1.awaitSuccess(10000);
@Test
public void testBatchHandler(TestContext ctx) throws Exception {
  String topicName = "testBatchHandler";
  String consumerId = topicName;
  Async batch1 = ctx.async();
  AtomicInteger index = new AtomicInteger();
  int numMessages = 500;
  kafkaCluster.useTo().produceStrings(numMessages, batch1::complete, () ->
      new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
  batch1.awaitSuccess(10000);
  Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  Context context = vertx.getOrCreateContext();
  consumer = createConsumer(context, config);
  Async batchHandler = ctx.async();
  consumer.batchHandler(records -> {
    ctx.assertEquals(numMessages, records.count());
    batchHandler.complete();
  });
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(rec -> {});
  consumer.subscribe(Collections.singleton(topicName));
}
@Test
public void testConsume(TestContext ctx) throws Exception {
  final String topicName = "testConsume";
  String consumerId = topicName;
  Async batch = ctx.async();
  AtomicInteger index = new AtomicInteger();
  int numMessages = 1000;
  kafkaCluster.useTo().produceStrings(numMessages, batch::complete, () ->
      new ProducerRecord<>(topicName, 0, "key-" + index.get(), "value-" + index.getAndIncrement()));
  batch.awaitSuccess(20000);
  Properties config = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumer = createConsumer(vertx, config);
  Async done = ctx.async();
  AtomicInteger count = new AtomicInteger(numMessages);
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(rec -> {
    if (count.decrementAndGet() == 0) {
      done.complete();
    }
  });
  consumer.subscribe(Collections.singleton(topicName));
}