@Override
public KafkaProducer<K, V> exceptionHandler(Handler<Throwable> handler) {
  // Error notification is handled entirely by the wrapped write stream.
  this.stream.exceptionHandler(handler);
  // Fluent API: return this producer so calls can be chained.
  return this;
}
/**
 * Produces 100000 string records (each carrying a per-record header) through the
 * raw KafkaWriteStream and then asserts they are all received on the topic.
 */
@Test
public void testStreamProduce(TestContext ctx) throws Exception {
  String topicName = "testStreamProduce";
  Properties config = kafkaCluster.useTo().getProducerProperties("testStreamProduce_producer");
  config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  producer = producer(Vertx.vertx(), config);
  // Any async producer failure fails the test immediately.
  producer.exceptionHandler(ctx::fail);
  int numMessages = 100000;
  for (int idx = 0; idx < numMessages; idx++) {
    ProducerRecord<String, String> msg =
        new ProducerRecord<>(topicName, 0, "key-" + idx, "value-" + idx);
    msg.headers().add("header_key", ("header_value-" + idx).getBytes());
    producer.write(msg);
  }
  assertReceiveMessages(ctx, topicName, numMessages);
}
// Produces 100000 headered string records via the raw KafkaWriteStream and asserts receipt.
// NOTE(review): this appears to be an exact duplicate of the testStreamProduce method above;
// two identical method declarations cannot coexist in one class — confirm whether this file
// is a concatenation of variants and remove the redundant copy.
@Test public void testStreamProduce(TestContext ctx) throws Exception { String topicName = "testStreamProduce"; Properties config = kafkaCluster.useTo().getProducerProperties("testStreamProduce_producer"); config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); producer = producer(Vertx.vertx(), config); producer.exceptionHandler(ctx::fail); int numMessages = 100000; for (int i = 0;i < numMessages;i++) { ProducerRecord<String, String> record = new ProducerRecord<>(topicName, 0, "key-" + i, "value-" + i); record.headers().add("header_key", ("header_value-" + i).getBytes()); producer.write(record); } assertReceiveMessages(ctx, topicName, numMessages); }
/**
 * Verifies that a failure injected into the mock producer is surfaced verbatim
 * through the write stream's exception handler.
 */
@Test
public void testProducerError(TestContext ctx) throws Exception {
  TestProducer mockProducer = new TestProducer();
  KafkaWriteStream<String, String> writeStream = ProducerTest.producer(Vertx.vertx(), mockProducer);
  writeStream.write(new ProducerRecord<>("the_topic", 0, 0L, "abc", "def"));
  RuntimeException expectedFailure = new RuntimeException();
  Async async = ctx.async();
  writeStream.exceptionHandler(err -> {
    // The handler must receive the exact same exception instance that was injected.
    ctx.assertEquals(expectedFailure, err);
    async.complete();
  });
  // Trigger the failure on the pending write.
  mockProducer.assertErrorNext(expectedFailure);
}
// Asserts that an exception injected via the mock producer reaches the stream's exception handler.
// NOTE(review): this appears to be an exact duplicate of the testProducerError method above;
// duplicate method declarations cannot coexist in one class — confirm and remove the redundant copy.
@Test public void testProducerError(TestContext ctx) throws Exception { TestProducer mock = new TestProducer(); KafkaWriteStream<String, String> producer = ProducerTest.producer(Vertx.vertx(), mock); producer.write(new ProducerRecord<>("the_topic", 0, 0L, "abc", "def")); RuntimeException cause = new RuntimeException(); Async async = ctx.async(); producer.exceptionHandler(err -> { ctx.assertEquals(cause, err); async.complete(); }); mock.assertErrorNext(cause); }
/**
 * Produces 100000 string records (each with a per-record header) through the
 * KafkaProducer wrapper built on top of the write stream, then asserts receipt.
 */
@Test
public void testProducerProduce(TestContext ctx) throws Exception {
  String topicName = "testProducerProduce";
  Properties config = kafkaCluster.useTo().getProducerProperties("testProducerProduce_producer");
  config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  producer = producer(Vertx.vertx(), config);
  // Any async producer failure fails the test immediately.
  producer.exceptionHandler(ctx::fail);
  // FIX: the local was previously also named "producer", shadowing the field assigned
  // two statements above; renamed so the field/local distinction is unambiguous.
  KafkaProducer<String, String> wrappedProducer = new KafkaProducerImpl<>(this.producer);
  int numMessages = 100000;
  for (int i = 0; i < numMessages; i++) {
    wrappedProducer.write(KafkaProducerRecord.create(topicName, "key-" + i, "value-" + i, 0)
      .addHeader("header_key", "header_value-" + i));
  }
  assertReceiveMessages(ctx, topicName, numMessages);
}
// Produces 100000 headered records through the KafkaProducer wrapper and asserts receipt.
// NOTE(review): the local "producer" shadows the field assigned two statements earlier;
// consider renaming the local. Also appears to be an exact duplicate of the
// testProducerProduce method above — confirm and remove the redundant copy.
@Test public void testProducerProduce(TestContext ctx) throws Exception { String topicName = "testProducerProduce"; Properties config = kafkaCluster.useTo().getProducerProperties("testProducerProduce_producer"); config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); producer = producer(Vertx.vertx(), config); producer.exceptionHandler(ctx::fail); KafkaProducer<String, String> producer = new KafkaProducerImpl<>(this.producer); int numMessages = 100000; for (int i = 0;i < numMessages;i++) { producer.write(KafkaProducerRecord.create(topicName, "key-" + i, "value-" + i, 0) .addHeader("header_key", "header_value-" + i)); } assertReceiveMessages(ctx, topicName, numMessages); }
// NOTE(review): fragment of a larger method — its signature is outside this view.
// Builds the write stream under test from the factory, stores it in the "producer"
// field (presumably for later cleanup — confirm against the enclosing method), wires
// any async error to fail the test, and prepares a thread-safe deque — presumably to
// track the keys of the 100000 records produced below this fragment.
KafkaWriteStream<K, V> writeStream = producerFactory.apply(producerConfig); producer = writeStream; writeStream.exceptionHandler(ctx::fail); int numMessages = 100000; ConcurrentLinkedDeque<K> keys = new ConcurrentLinkedDeque<K>();
// NOTE(review): fragment of a larger method (header not visible) and an exact duplicate
// of the fragment above — confirm whether this file concatenates chunk variants.
// Creates the write stream from the factory, keeps it in the "producer" field, fails the
// test on any async producer error, and allocates a concurrent deque of keys whose use
// lies outside this fragment.
KafkaWriteStream<K, V> writeStream = producerFactory.apply(producerConfig); producer = writeStream; writeStream.exceptionHandler(ctx::fail); int numMessages = 100000; ConcurrentLinkedDeque<K> keys = new ConcurrentLinkedDeque<K>();