// Fluent delegate: registers the failure callback on the wrapped read stream and returns this consumer.
@Override public KafkaConsumer<K, V> exceptionHandler(Handler<Throwable> handler) { this.stream.exceptionHandler(handler); return this; }
/**
 * Verifies that subscribing without a "group.id" surfaces an
 * {@code InvalidGroupIdException} through the consumer's exception handler.
 */
@Test
public void testPollExceptionHandler(TestContext ctx) throws Exception {
  Properties props = kafkaCluster.useTo()
      .getConsumerProperties("someRandomGroup", "someRandomClientID", OffsetResetStrategy.EARLIEST);
  // Deliberately strip the group id so the broker rejects the poll.
  props.remove("group.id");
  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumer = createConsumer(vertx, props);
  Async failureSeen = ctx.async();
  consumer.exceptionHandler(err -> {
    ctx.assertTrue(err instanceof InvalidGroupIdException);
    failureSeen.complete();
  });
  consumer.subscribe(Collections.singleton("someTopic")).handler(System.out::println);
}
/**
 * Verifies that subscribing without a "group.id" surfaces an
 * {@code InvalidGroupIdException} through the consumer's exception handler.
 */
@Test
public void testPollExceptionHandler(TestContext ctx) throws Exception {
  Properties props = kafkaCluster.useTo()
      .getConsumerProperties("someRandomGroup", "someRandomClientID", OffsetResetStrategy.EARLIEST);
  // Deliberately strip the group id so the broker rejects the poll.
  props.remove("group.id");
  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumer = createConsumer(vertx, props);
  Async failureSeen = ctx.async();
  consumer.exceptionHandler(err -> {
    ctx.assertTrue(err instanceof InvalidGroupIdException);
    failureSeen.complete();
  });
  consumer.subscribe(Collections.singleton("someTopic")).handler(System.out::println);
}
/**
 * End-to-end consume test: produces 1000 string records into "testConsume",
 * subscribes, and completes once every record has reached the handler.
 */
@Test
public void testConsume(TestContext ctx) throws Exception {
  final String topicName = "testConsume";
  String consumerId = topicName;
  Async produced = ctx.async();
  AtomicInteger seq = new AtomicInteger();
  int numMessages = 1000;
  kafkaCluster.useTo().produceStrings(numMessages, produced::complete,
      () -> new ProducerRecord<>(topicName, 0, "key-" + seq.get(), "value-" + seq.getAndIncrement()));
  produced.awaitSuccess(20000);
  Properties props = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumer = createConsumer(vertx, props);
  Async allReceived = ctx.async();
  AtomicInteger remaining = new AtomicInteger(numMessages);
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(record -> {
    // Finish when the final record arrives.
    if (remaining.decrementAndGet() == 0) {
      allReceived.complete();
    }
  });
  consumer.subscribe(Collections.singleton(topicName));
}
/**
 * End-to-end consume test: produces 1000 string records into "testConsume",
 * subscribes, and completes once every record has reached the handler.
 */
@Test
public void testConsume(TestContext ctx) throws Exception {
  final String topicName = "testConsume";
  String consumerId = topicName;
  Async produced = ctx.async();
  AtomicInteger seq = new AtomicInteger();
  int numMessages = 1000;
  kafkaCluster.useTo().produceStrings(numMessages, produced::complete,
      () -> new ProducerRecord<>(topicName, 0, "key-" + seq.get(), "value-" + seq.getAndIncrement()));
  produced.awaitSuccess(20000);
  Properties props = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  consumer = createConsumer(vertx, props);
  Async allReceived = ctx.async();
  AtomicInteger remaining = new AtomicInteger(numMessages);
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(record -> {
    // Finish when the final record arrives.
    if (remaining.decrementAndGet() == 0) {
      allReceived.complete();
    }
  });
  consumer.subscribe(Collections.singleton(topicName));
}
// NOTE(review): fragment — the handler lambda opened here continues beyond this view.
// `count` counts down from numMessages; `paused` presumably flags whether the stream
// was paused mid-consumption (pause/resume test) — TODO confirm against the full method.
Async done = ctx.async(); AtomicInteger count = new AtomicInteger(numMessages); consumer.exceptionHandler(ctx::fail); AtomicBoolean paused = new AtomicBoolean(); consumer.handler(rec -> {
// NOTE(review): fragment — the handler lambda opened here continues beyond this view.
// `count` counts down from numMessages; `paused` presumably flags whether the stream
// was paused mid-consumption (pause/resume test) — TODO confirm against the full method.
Async done = ctx.async(); AtomicInteger count = new AtomicInteger(numMessages); consumer.exceptionHandler(ctx::fail); AtomicBoolean paused = new AtomicBoolean(); consumer.handler(rec -> {
/**
 * Consumes 1000 records produced with exactly one header each and asserts the
 * "header_key&lt;i&gt;" / "header_value&lt;i&gt;" sequence is delivered in order.
 */
@Test
public void testStreamWithHeader(TestContext ctx) {
  int numMessages = 1000;
  String topicName = "testStreamWithHeader";
  Properties props = setupConsumeWithHeaders(ctx, numMessages, topicName);
  consumer = createConsumer(vertx, props);
  Async finished = ctx.async();
  AtomicInteger remaining = new AtomicInteger(numMessages);
  AtomicInteger hdrSeq = new AtomicInteger();
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(record -> {
    Header[] headers = record.headers().toArray();
    // Each record carries exactly one header.
    ctx.assertEquals(1, headers.length);
    Header first = headers[0];
    ctx.assertEquals("header_key" + hdrSeq.get(), first.key());
    ctx.assertEquals("header_value" + hdrSeq.getAndIncrement(), new String(first.value()));
    if (remaining.decrementAndGet() == 0) {
      finished.complete();
    }
  });
  consumer.subscribe(Collections.singleton(topicName));
}
// NOTE(review): fragment of a flow-control test — `demand` and `batchSize` (200)
// presumably drive fetch()/backpressure in code past this view; TODO confirm.
Async done = ctx.async(); AtomicInteger count = new AtomicInteger(numMessages); consumer.exceptionHandler(ctx::fail); AtomicLong demand = new AtomicLong(); long batchSize = 200L;
// NOTE(review): fragment of a flow-control test — `demand` and `batchSize` (200)
// presumably drive fetch()/backpressure in code past this view; TODO confirm.
Async done = ctx.async(); AtomicInteger count = new AtomicInteger(numMessages); consumer.exceptionHandler(ctx::fail); AtomicLong demand = new AtomicLong(); long batchSize = 200L;
/**
 * Consumes 1000 records produced with exactly one header each and asserts the
 * "header_key&lt;i&gt;" / "header_value&lt;i&gt;" sequence is delivered in order.
 */
@Test
public void testStreamWithHeader(TestContext ctx) {
  int numMessages = 1000;
  String topicName = "testStreamWithHeader";
  Properties props = setupConsumeWithHeaders(ctx, numMessages, topicName);
  consumer = createConsumer(vertx, props);
  Async finished = ctx.async();
  AtomicInteger remaining = new AtomicInteger(numMessages);
  AtomicInteger hdrSeq = new AtomicInteger();
  consumer.exceptionHandler(ctx::fail);
  consumer.handler(record -> {
    Header[] headers = record.headers().toArray();
    // Each record carries exactly one header.
    ctx.assertEquals(1, headers.length);
    Header first = headers[0];
    ctx.assertEquals("header_key" + hdrSeq.get(), first.key());
    ctx.assertEquals("header_value" + hdrSeq.getAndIncrement(), new String(first.value()));
    if (remaining.decrementAndGet() == 0) {
      finished.complete();
    }
  });
  consumer.subscribe(Collections.singleton(topicName));
}
// NOTE(review): fragment — a raw read-stream is assigned to `consumer` (for teardown,
// presumably) and its handler asserts keys pop off a pre-built stack in order; the
// lambda body continues past this view — TODO confirm `keys` ordering semantics.
consumer = readStream; AtomicInteger count = new AtomicInteger(numMessages); readStream.exceptionHandler(ctx::fail); readStream.handler(rec -> { ctx.assertEquals(keys.pop(), rec.key());
// NOTE(review): fragment — counting handler opened here; the lambda and the
// `if` body (likely completing an Async when count hits zero) continue past this view.
consumer.exceptionHandler(ctx::fail); consumer.handler(rec -> { if (count.decrementAndGet() == 0) {
// NOTE(review): fragment — a raw read-stream is assigned to `consumer` (for teardown,
// presumably) and its handler asserts keys pop off a pre-built stack in order; the
// lambda body continues past this view — TODO confirm `keys` ordering semantics.
consumer = readStream; AtomicInteger count = new AtomicInteger(numMessages); readStream.exceptionHandler(ctx::fail); readStream.handler(rec -> { ctx.assertEquals(keys.pop(), rec.key());
/**
 * Verifies the batch handler observes all 500 produced records in one batch
 * when the consumer runs on an explicit Vert.x context.
 */
@Test
public void testBatchHandler(TestContext ctx) throws Exception {
  String topicName = "testBatchHandler";
  String consumerId = topicName;
  Async produced = ctx.async();
  AtomicInteger seq = new AtomicInteger();
  int numMessages = 500;
  kafkaCluster.useTo().produceStrings(numMessages, produced::complete,
      () -> new ProducerRecord<>(topicName, 0, "key-" + seq.get(), "value-" + seq.getAndIncrement()));
  produced.awaitSuccess(10000);
  Properties props = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  Context context = vertx.getOrCreateContext();
  consumer = createConsumer(context, props);
  Async batchSeen = ctx.async();
  consumer.batchHandler(records -> {
    // The whole produced set is expected to arrive as a single batch.
    ctx.assertEquals(numMessages, records.count());
    batchSeen.complete();
  });
  consumer.exceptionHandler(ctx::fail);
  // Per-record handler must be set for the stream to flow, but the batch handler does the checking.
  consumer.handler(record -> {});
  consumer.subscribe(Collections.singleton(topicName));
}
// NOTE(review): fragment — counting handler opened here; the lambda and the
// `if` body (likely completing an Async when count hits zero) continue past this view.
consumer.exceptionHandler(ctx::fail); consumer.handler(rec -> { if (count.decrementAndGet() == 0) {
/**
 * Verifies the batch handler observes all 500 produced records in one batch
 * when the consumer runs on an explicit Vert.x context.
 */
@Test
public void testBatchHandler(TestContext ctx) throws Exception {
  String topicName = "testBatchHandler";
  String consumerId = topicName;
  Async produced = ctx.async();
  AtomicInteger seq = new AtomicInteger();
  int numMessages = 500;
  kafkaCluster.useTo().produceStrings(numMessages, produced::complete,
      () -> new ProducerRecord<>(topicName, 0, "key-" + seq.get(), "value-" + seq.getAndIncrement()));
  produced.awaitSuccess(10000);
  Properties props = kafkaCluster.useTo().getConsumerProperties(consumerId, consumerId, OffsetResetStrategy.EARLIEST);
  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
  Context context = vertx.getOrCreateContext();
  consumer = createConsumer(context, props);
  Async batchSeen = ctx.async();
  consumer.batchHandler(records -> {
    // The whole produced set is expected to arrive as a single batch.
    ctx.assertEquals(numMessages, records.count());
    batchSeen.complete();
  });
  consumer.exceptionHandler(ctx::fail);
  // Per-record handler must be set for the stream to flow, but the batch handler does the checking.
  consumer.handler(record -> {});
  consumer.subscribe(Collections.singleton(topicName));
}
// NOTE(review): fragment of a commit test — `committed = ctx.async(2)` waits for two
// completions (presumably two commit acknowledgements); the rest of the method
// is outside this view — TODO confirm.
config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); consumer = createConsumer(vertx, config); consumer.exceptionHandler(ctx::fail); Async committed = ctx.async(2); AtomicInteger count = new AtomicInteger();
// NOTE(review): fragment — consumer created on an explicit context with failures
// routed to the test context; surrounding method not visible here.
consumer = createConsumer(context, config); consumer.exceptionHandler(ctx::fail);
// NOTE(review): fragment — consumer created on an explicit context with failures
// routed to the test context; surrounding method not visible here.
consumer = createConsumer(context, config); consumer.exceptionHandler(ctx::fail);