Runnable completionCallback, Supplier<ProducerRecord<K, V>> messageSupplier) { Properties props = getProducerProperties(producerName); Thread t = new Thread(() -> { LOGGER.debug("Starting producer {} to write {} messages", producerName, messageCount);
Properties props = getProducerProperties(producerName); KafkaProducer<K, V> kafkaProducer = new KafkaProducer<>(props, keySerializer, valueSerializer); InteractiveProducer<K, V> interactive = new InteractiveProducer<K, V>() {
/**
 * Starts an embedded single-broker Kafka cluster (with its own ZooKeeper) and
 * deploys the example verticles against it: one dashboard consumer verticle and
 * three instances of the metrics-producing verticle.
 *
 * @throws Exception if the embedded cluster cannot be started
 */
@Override
public void start() throws Exception {
    // Kafka setup for the example
    File dataDir = Testing.Files.createTestingDirectory("cluster");
    dataDir.deleteOnExit(); // remove the data directory when the JVM exits
    kafkaCluster = new KafkaCluster()
        .usingDirectory(dataDir)
        .withPorts(2181, 9092) // ZooKeeper port, then Kafka broker port
        .addBrokers(1)
        .deleteDataPriorToStartup(true) // always start from a clean data dir
        .startup();

    // Deploy the dashboard
    // Raw (Map) cast copies the consumer Properties into a JsonObject used as verticle config.
    JsonObject consumerConfig = new JsonObject((Map) kafkaCluster.useTo()
        .getConsumerProperties("the_group", "the_client", OffsetResetStrategy.LATEST));
    vertx.deployVerticle(
        DashboardVerticle.class.getName(),
        new DeploymentOptions().setConfig(consumerConfig)
    );

    // Deploy the metrics collector : 3 times
    JsonObject producerConfig = new JsonObject((Map) kafkaCluster.useTo()
        .getProducerProperties("the_producer"));
    vertx.deployVerticle(
        MetricsVerticle.class.getName(),
        new DeploymentOptions().setConfig(producerConfig).setInstances(3)
    );
}
/**
 * Verifies that {@code AdminUtils.topicExists} reports an existing topic, and that
 * the topic can subsequently be deleted.
 *
 * <p>Fix: removed the producer {@code Properties} that were built here but never
 * used by the test (dead code inherited from a copy/paste of the producer tests).
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testTopicExists(TestContext ctx) throws Exception {
    final String topicName = "testTopicExists";
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, false);

    // Create the topic (2 partitions, replication factor 1) and block until done.
    Async createAsync = ctx.async();
    adminUtils.createTopic(topicName, 2, 1,
        ctx.asyncAssertSuccess(res -> createAsync.complete()));
    createAsync.awaitSuccess(10000);

    // Latch with count 2: one countdown for the existence check, one for the deletion.
    Async existsAndDeleteAsync = ctx.async(2);
    adminUtils.topicExists(topicName, ctx.asyncAssertSuccess(res -> existsAndDeleteAsync.countDown()));
    adminUtils.deleteTopic(topicName, ctx.asyncAssertSuccess(res -> existsAndDeleteAsync.countDown()));
    existsAndDeleteAsync.awaitSuccess(10000);
}
/**
 * Verifies that creating a topic with a replication factor of zero fails with
 * the expected error message while a single broker is running.
 *
 * <p>Fix: removed the producer {@code Properties} that were built here but never
 * used by the test (dead code inherited from a copy/paste of the producer tests).
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testCreateTopicWithZeroReplicas(TestContext ctx) throws Exception {
    final String topicName = "testCreateTopicWithZeroReplicas";
    Async async = ctx.async();
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, true);
    adminUtils.createTopic(topicName, 1, 0, ctx.asyncAssertFailure(res -> {
        // The message must match Kafka's admin-layer error text exactly.
        ctx.assertEquals("Replication factor must be larger than 0.",
            res.getLocalizedMessage(),
            "Topic creation must fail: one Broker present, but zero replicas requested");
        async.complete();
    }));
    async.awaitSuccess(10000);
}
/**
 * Verifies that a topic can be created and subsequently deleted.
 *
 * <p>Fix: removed the producer {@code Properties} that were built here but never
 * used by the test (dead code inherited from a copy/paste of the producer tests).
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testCreateTopic(TestContext ctx) throws Exception {
    final String topicName = "testCreateTopic";
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, false);

    // Create the topic (1 partition, replication factor 1) and block until done.
    Async createAsync = ctx.async();
    adminUtils.createTopic(topicName, 1, 1,
        ctx.asyncAssertSuccess(res -> createAsync.complete()));
    createAsync.awaitSuccess(10000);

    // Delete it again and wait for the deletion to be acknowledged.
    Async deleteAsync = ctx.async();
    adminUtils.deleteTopic(topicName, ctx.asyncAssertSuccess(res -> deleteAsync.complete()));
    deleteAsync.awaitSuccess(10000);
}
/**
 * Verifies that {@code AdminUtils.topicExists} reports an existing topic, and that
 * the topic can subsequently be deleted.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testTopicExists(TestContext ctx) throws Exception {
    final String topicName = "testTopicExists";
    // NOTE(review): these producer properties are built but never used in this test.
    Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    Async createAsync = ctx.async();
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, false);
    // Create the topic (2 partitions, replication factor 1) and block until done.
    adminUtils.createTopic(topicName, 2, 1, ctx.asyncAssertSuccess(
        res -> createAsync.complete())
    );
    createAsync.awaitSuccess(10000);
    // Latch with count 2: one countdown for the existence check, one for the deletion.
    Async existsAndDeleteAsync = ctx.async(2);
    adminUtils.topicExists(topicName, ctx.asyncAssertSuccess(res -> existsAndDeleteAsync.countDown()));
    adminUtils.deleteTopic(topicName, ctx.asyncAssertSuccess(res -> existsAndDeleteAsync.countDown()));
    existsAndDeleteAsync.awaitSuccess(10000);
}
/**
 * Verifies that creating a topic with a replication factor of zero fails with
 * the expected error message while a single broker is running.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testCreateTopicWithZeroReplicas(TestContext ctx) throws Exception {
    final String topicName = "testCreateTopicWithZeroReplicas";
    // NOTE(review): these producer properties are built but never used in this test.
    Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    Async async = ctx.async();
    // Third argument differs from the sibling tests (true vs false) — presumably an
    // auto-close/behavior flag of AdminUtils.create; TODO confirm against its Javadoc.
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, true);
    adminUtils.createTopic(topicName, 1, 0, ctx.asyncAssertFailure(
        res -> {
            // The message must match Kafka's admin-layer error text exactly.
            ctx.assertEquals("Replication factor must be larger than 0.",
                res.getLocalizedMessage(),
                "Topic creation must fail: one Broker present, but zero replicas requested");
            async.complete();
        })
    );
    async.awaitSuccess(10000);
}
/**
 * Verifies that an existing topic can be deleted.
 *
 * <p>Fixes: removed the producer {@code Properties} that were built here but never
 * used (dead code), and renamed the first latch to {@code createAsync} for
 * consistency with {@code testCreateTopic}.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testDeleteTopic(TestContext ctx) throws Exception {
    final String topicName = "testDeleteTopic";
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, false);

    // Create the topic (1 partition, replication factor 1) and block until done.
    Async createAsync = ctx.async();
    adminUtils.createTopic(topicName, 1, 1,
        ctx.asyncAssertSuccess(res -> createAsync.complete()));
    createAsync.awaitSuccess(10000);

    // Delete the topic and wait for the deletion to be acknowledged.
    Async deleteAsync = ctx.async();
    adminUtils.deleteTopic(topicName, ctx.asyncAssertSuccess(res -> deleteAsync.complete()));
    deleteAsync.awaitSuccess(10000);
}
/**
 * Produces 100000 keyed records (each with a per-message header) to partition 0
 * through the Vert.x {@code KafkaProducer} wrapper and asserts they are all received.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testProducerProduce(TestContext ctx) throws Exception {
    String topicName = "testProducerProduce";

    Properties config = kafkaCluster.useTo().getProducerProperties("testProducerProduce_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

    producer = producer(Vertx.vertx(), config);
    producer.exceptionHandler(ctx::fail);
    // Distinct name so the wrapper does not shadow the `producer` field above.
    KafkaProducer<String, String> wrappedProducer = new KafkaProducerImpl<>(this.producer);

    int numMessages = 100000;
    for (int i = 0; i < numMessages; i++) {
        KafkaProducerRecord<String, String> record =
            KafkaProducerRecord.create(topicName, "key-" + i, "value-" + i, 0);
        record.addHeader("header_key", "header_value-" + i);
        wrappedProducer.write(record);
    }
    assertReceiveMessages(ctx, topicName, numMessages);
}
/**
 * Verifies that a topic can be created and subsequently deleted.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testCreateTopic(TestContext ctx) throws Exception {
    final String topicName = "testCreateTopic";
    // NOTE(review): these producer properties are built but never used in this test.
    Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, false);
    // Create the topic (1 partition, replication factor 1) and block until done.
    Async createAsync = ctx.async();
    adminUtils.createTopic(topicName, 1, 1, ctx.asyncAssertSuccess(
        res -> createAsync.complete())
    );
    createAsync.awaitSuccess(10000);
    // Delete it again and wait for the deletion to be acknowledged.
    Async deleteAsync = ctx.async();
    adminUtils.deleteTopic(topicName, ctx.asyncAssertSuccess(res -> deleteAsync.complete()));
    deleteAsync.awaitSuccess(10000);
}
/**
 * Verifies that deleting a topic that was never created fails with a
 * descriptive error message.
 *
 * <p>Fixes: removed the producer {@code Properties} that were built here but never
 * used (dead code), and added the trailing {@code awaitSuccess} that every sibling
 * test has, so the test blocks with a timeout instead of returning before the
 * failure callback has run.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testDeleteNonExistingTopic(TestContext ctx) throws Exception {
    final String topicName = "testDeleteNonExistingTopic";
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, true);
    Async async = ctx.async();
    adminUtils.deleteTopic(topicName, ctx.asyncAssertFailure(res -> {
        ctx.assertEquals("Topic `" + topicName + "` to delete does not exist",
            res.getLocalizedMessage(),
            "Topic must not exist (not created before)");
        async.complete();
    }));
    // Consistent with the other admin tests: fail deterministically after 10s
    // if the callback never fires.
    async.awaitSuccess(10000);
}
/**
 * Verifies that an existing topic can be deleted.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testDeleteTopic(TestContext ctx) throws Exception {
    final String topicName = "testDeleteTopic";
    // NOTE(review): these producer properties are built but never used in this test.
    Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    Async async = ctx.async();
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, false);
    // Create the topic (1 partition, replication factor 1) and block until done.
    adminUtils.createTopic(topicName, 1, 1, ctx.asyncAssertSuccess(
        res -> async.complete())
    );
    async.awaitSuccess(10000);
    // Delete the topic and wait for the deletion to be acknowledged.
    Async deleteAsync = ctx.async();
    adminUtils.deleteTopic(topicName, ctx.asyncAssertSuccess(res -> deleteAsync.complete()));
    deleteAsync.awaitSuccess(10000);
}
/**
 * Produces 100000 keyed records (each with a per-message header) to partition 0
 * through the raw write stream and asserts they are all received.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testStreamProduce(TestContext ctx) throws Exception {
    String topicName = "testStreamProduce";

    Properties config = kafkaCluster.useTo().getProducerProperties("testStreamProduce_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

    producer = producer(Vertx.vertx(), config);
    producer.exceptionHandler(ctx::fail);

    int numMessages = 100000;
    for (int msgIndex = 0; msgIndex < numMessages; msgIndex++) {
        ProducerRecord<String, String> record =
            new ProducerRecord<>(topicName, 0, "key-" + msgIndex, "value-" + msgIndex);
        // Attach a per-message header before handing the record to the stream.
        record.headers().add("header_key", ("header_value-" + msgIndex).getBytes());
        producer.write(record);
    }
    assertReceiveMessages(ctx, topicName, numMessages);
}
/**
 * Produces 100000 keyed records (each with a per-message header) to partition 0
 * through the Vert.x {@code KafkaProducer} wrapper and asserts they are all received.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testProducerProduce(TestContext ctx) throws Exception {
    String topicName = "testProducerProduce";
    Properties config = kafkaCluster.useTo().getProducerProperties("testProducerProduce_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producer = producer(Vertx.vertx(), config);
    producer.exceptionHandler(ctx::fail);
    // NOTE(review): this local shadows the `producer` field assigned just above;
    // consider renaming for clarity.
    KafkaProducer<String, String> producer = new KafkaProducerImpl<>(this.producer);
    int numMessages = 100000;
    for (int i = 0; i < numMessages; i++) {
        // Write to partition 0 with a per-message header.
        producer.write(KafkaProducerRecord.create(topicName, "key-" + i, "value-" + i, 0)
            .addHeader("header_key", "header_value-" + i));
    }
    assertReceiveMessages(ctx, topicName, numMessages);
}
/**
 * Verifies that deleting a topic that was never created fails with a
 * descriptive error message.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testDeleteNonExistingTopic(TestContext ctx) throws Exception {
    final String topicName = "testDeleteNonExistingTopic";
    // NOTE(review): these producer properties are built but never used in this test.
    Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, true);
    Async async = ctx.async();
    adminUtils.deleteTopic(topicName, ctx.asyncAssertFailure(res -> {
            ctx.assertEquals("Topic `"+topicName+"` to delete does not exist",
                res.getLocalizedMessage(),
                "Topic must not exist (not created before)");
            async.complete();
        })
    );
    // NOTE(review): unlike the sibling tests there is no async.awaitSuccess(10000)
    // here, so the method returns before the failure callback runs — confirm this
    // is intentional (vertx-unit still waits for the Async to complete).
}
/**
 * Produces 100000 keyed records (each with a per-message header) to partition 0
 * through the raw write stream and asserts they are all received.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testStreamProduce(TestContext ctx) throws Exception {
    String topicName = "testStreamProduce";
    Properties config = kafkaCluster.useTo().getProducerProperties("testStreamProduce_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producer = producer(Vertx.vertx(), config);
    producer.exceptionHandler(ctx::fail);
    int numMessages = 100000;
    for (int i = 0; i < numMessages; i++) {
        // Explicit partition 0; header value is unique per message.
        ProducerRecord<String, String> record =
            new ProducerRecord<>(topicName, 0, "key-" + i, "value-" + i);
        record.headers().add("header_key", ("header_value-" + i).getBytes());
        producer.write(record);
    }
    assertReceiveMessages(ctx, topicName, numMessages);
}
/**
 * Ensures that {@code topicExists} reports {@code false} for a topic that was
 * never created.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testTopicExistsNonExisting(TestContext ctx) throws Exception {
    final String topicName = "testTopicExistsNonExisting";

    Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, true);
    Async existsCheck = ctx.async();
    adminUtils.topicExists(topicName, ctx.asyncAssertSuccess(exists -> {
        ctx.assertFalse(exists, "Topic must not exist");
        existsCheck.complete();
    }));
    existsCheck.awaitSuccess(10000);
}
/**
 * Ensures that {@code topicExists} reports {@code false} for a topic that was
 * never created.
 *
 * @param ctx the Vert.x unit test context
 * @throws Exception if the test times out
 */
@Test
public void testTopicExistsNonExisting(TestContext ctx) throws Exception {
    final String topicName = "testTopicExistsNonExisting";
    // NOTE(review): these producer properties are built but never used in this test.
    Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    // NOTE(review): the name `createAsync` is misleading — nothing is created here;
    // it only gates the existence check.
    Async createAsync = ctx.async();
    AdminUtils adminUtils = AdminUtils.create(vertx, zookeeperHosts, true);
    adminUtils.topicExists(topicName, ctx.asyncAssertSuccess(res -> {
            ctx.assertFalse(res, "Topic must not exist");
            createAsync.complete();
        })
    );
    createAsync.awaitSuccess(10000);
}
/**
 * Create an {@link InteractiveProducer simple producer} that can be used to write
 * messages to the cluster. Each {@code write} sends the record and flushes the
 * underlying client so the message is on the broker before the call returns.
 *
 * @param producerName the name of the producer; may not be null
 * @param keySerializer the serializer for the keys; may not be null
 * @param valueSerializer the serializer for the values; may not be null
 * @return the object that can be used to produce messages; never null
 */
public <K, V> InteractiveProducer<K, V> createProducer(String producerName, Serializer<K> keySerializer, Serializer<V> valueSerializer) {
    Properties producerProps = getProducerProperties(producerName);
    KafkaProducer<K, V> kafkaProducer = new KafkaProducer<>(producerProps, keySerializer, valueSerializer);
    return new InteractiveProducer<K, V>() {
        @Override
        public InteractiveProducer<K, V> write(ProducerRecord<K, V> record) {
            // Flush after every send: interactive use favors immediacy over throughput.
            kafkaProducer.send(record);
            kafkaProducer.flush();
            return this;
        }

        @Override
        public void close() {
            kafkaProducer.close();
        }
    };
}