/**
 * Asynchronously write a record to a topic.
 *
 * @param record the record to write
 * @param handler handler invoked once the write has completed (success or failure)
 * @return current KafkaProducer instance, for fluent chaining
 */
public io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> write(io.vertx.rxjava.kafka.client.producer.KafkaProducerRecord<K, V> record, Handler<AsyncResult<RecordMetadata>> handler) {
  // Unwrap the rx record and forward to the underlying (non-rx) producer.
  delegate.write(record.getDelegate(), handler);
  return this;
}
/**
 * Close the producer.
 *
 * @param completionHandler handler invoked once the close operation has completed
 */
public void close(Handler<AsyncResult<Void>> completionHandler) {
  // Straight pass-through to the underlying producer.
  delegate.close(completionHandler);
}
/**
 * Set an exception handler on the producer, forwarded to the underlying delegate.
 *
 * @param handler handler notified of producer errors
 * @return current KafkaProducer instance, for fluent chaining
 */
public io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> exceptionHandler(Handler<Throwable> handler) {
  delegate.exceptionHandler(handler);
  return this;
}
/**
 * Writing a value the configured StringSerializer cannot handle must surface
 * the failure through the producer's exception handler.
 */
@Test
public void testExceptionHandler(TestContext ctx) throws Exception {
  Async async = ctx.async();
  Properties props = new Properties();
  props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  // Deliberately not a String: serialization will fail and trigger the handler.
  Date badValue = new Date();
  KafkaProducer.create(Vertx.vertx(), props)
      .exceptionHandler(err -> async.complete())
      .write(KafkaProducerRecord.create("topic", "key", badValue));
}
/**
 * Start the verticle: build producer config from the deployment config,
 * obtain the shared producer and complete the start future after one write.
 */
@Override
public void start(Future<Void> startFuture) throws Exception {
  Properties props = new Properties();
  props.putAll(context.config().getMap());
  KafkaProducer<String, String> producer = KafkaProducer.createShared(vertx, "the-name", props);
  // Map the write result to Void so it can complete the start future directly.
  producer.write(KafkaProducerRecord.create("the_topic", "the_value"), ar -> startFuture.handle(ar.map((Void) null)));
}
}
/**
 * Create a new KafkaProducer instance.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
public static <K, V> io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> create(io.vertx.rxjava.core.Vertx vertx, Map<String, String> config) {
  // Wrap the core producer in its rx facade; element types are erased, so unknown type args.
  return io.vertx.rxjava.kafka.client.producer.KafkaProducer.newInstance(
      io.vertx.kafka.client.producer.KafkaProducer.create(vertx.getDelegate(), config),
      io.vertx.lang.rx.TypeArg.unknown(),
      io.vertx.lang.rx.TypeArg.unknown());
}
/**
 * Get or create a KafkaProducer instance which shares its stream with any other
 * KafkaProducer created with the same {@code name}.
 *
 * @param vertx Vert.x instance to use
 * @param name the producer name used to identify the shared instance
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
public static <K, V> io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> createShared(io.vertx.rxjava.core.Vertx vertx, String name, Map<String, String> config) {
  return io.vertx.rxjava.kafka.client.producer.KafkaProducer.newInstance(
      io.vertx.kafka.client.producer.KafkaProducer.createShared(vertx.getDelegate(), name, config),
      io.vertx.lang.rx.TypeArg.unknown(),
      io.vertx.lang.rx.TypeArg.unknown());
}
/**
 * Get the partition metadata for the given topic.
 *
 * @param topic topic for which partition info is fetched
 * @param handler handler invoked with the list of partitions once available
 * @return current KafkaProducer instance, for fluent chaining
 */
public io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> partitionsFor(String topic, Handler<AsyncResult<List<PartitionInfo>>> handler) {
  delegate.partitionsFor(topic, handler);
  return this;
}
/**
 * Set a drain handler on the producer, forwarded to the underlying delegate.
 *
 * @param handler handler notified when the write queue is ready for more writes
 * @return current KafkaProducer instance, for fluent chaining
 */
public io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> drainHandler(Handler<Void> handler) {
  delegate.drainHandler(handler);
  return this;
}
/**
 * A non-String value must fail in the StringSerializer and be reported
 * via the registered exception handler, completing the async.
 */
@Test
public void testExceptionHandler(TestContext ctx) throws Exception {
  Async async = ctx.async();
  Properties props = new Properties();
  props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  Date notSerializable = new Date(); // value type the serializer rejects
  KafkaProducer.create(Vertx.vertx(), props)
      .exceptionHandler(failure -> async.complete())
      .write(KafkaProducerRecord.create("topic", "key", notSerializable));
}
/**
 * Verticle start: copy deployment config into producer properties, get the
 * shared producer, write one record and complete the start future with its result.
 */
@Override
public void start(Future<Void> startFuture) throws Exception {
  Properties config = new Properties();
  config.putAll(context.config().getMap());
  KafkaProducer<String, String> producer = KafkaProducer.createShared(vertx, "the-name", config);
  KafkaProducerRecord<String, String> record = KafkaProducerRecord.create("the_topic", "the_value");
  // Erase the RecordMetadata result to Void before completing the start future.
  producer.write(record, ar -> startFuture.handle(ar.map((Void) null)));
}
}
/**
 * Create a new KafkaProducer instance.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
public static <K, V> io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> create(io.vertx.rxjava.core.Vertx vertx, Map<String, String> config) {
  io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> wrapped =
      io.vertx.rxjava.kafka.client.producer.KafkaProducer.newInstance(
          io.vertx.kafka.client.producer.KafkaProducer.create(vertx.getDelegate(), config),
          io.vertx.lang.rx.TypeArg.unknown(),
          io.vertx.lang.rx.TypeArg.unknown());
  return wrapped;
}
/**
 * Get or create a KafkaProducer which shares its stream with any other
 * KafkaProducer created under the same {@code name}.
 *
 * @param vertx Vert.x instance to use
 * @param name the producer name identifying the shared instance
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
public static <K, V> io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> createShared(io.vertx.rxjava.core.Vertx vertx, String name, Map<String, String> config) {
  io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> wrapped =
      io.vertx.rxjava.kafka.client.producer.KafkaProducer.newInstance(
          io.vertx.kafka.client.producer.KafkaProducer.createShared(vertx.getDelegate(), name, config),
          io.vertx.lang.rx.TypeArg.unknown(),
          io.vertx.lang.rx.TypeArg.unknown());
  return wrapped;
}
/**
 * Fetch partition metadata for the given topic and deliver it to the handler.
 *
 * @param topic topic whose partition info is requested
 * @param handler handler invoked with the partition list on completion
 * @return current KafkaProducer instance, for fluent chaining
 */
public io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> partitionsFor(String topic, Handler<AsyncResult<List<PartitionInfo>>> handler) {
  // Delegate directly; no conversion of the PartitionInfo list is needed here.
  delegate.partitionsFor(topic, handler);
  return this;
}
/**
 * Register a drain handler, forwarded unchanged to the delegate producer.
 *
 * @param handler handler called when the producer can accept more writes
 * @return current KafkaProducer instance, for fluent chaining
 */
public io.vertx.rxjava.kafka.client.producer.KafkaProducer<K, V> drainHandler(Handler<Void> handler) {
  this.delegate.drainHandler(handler);
  return this;
}
/**
 * A simulated write error raised by the mock driver must reach the Vert.x
 * exception handler as a SimulatedWriteException; anything else fails the test.
 */
@Test
public void testWriteWithSimulatedError(TestContext ctx) {
  TestProducerWriteError mock = new TestProducerWriteError();
  KafkaProducer<String, String> producer = KafkaProducer.create(vertx, mock);
  KafkaProducerRecord<String, String> record = KafkaProducerRecord.create("myTopic", "test");
  vertx.exceptionHandler(thrown -> {
    // Only the simulated failure is expected; any other throwable is a real bug.
    if (!(thrown instanceof SimulatedWriteException)) {
      ctx.fail(thrown);
    }
  });
  producer.write(record);
}
}
/**
 * Several producers created under the same shared name must all be able to
 * write; after closing them all, the shared network thread must be released.
 */
@Test
public void testSharedProducer(TestContext ctx) {
  Properties config = kafkaCluster.useTo().getProducerProperties("the_producer");
  config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
  config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
  int num = 3;
  Async sentLatch = ctx.async(num);
  LinkedList<KafkaProducer<String, String>> producers = new LinkedList<>();
  for (int i = 0; i < num; i++) {
    KafkaProducer<String, String> shared = KafkaProducer.createShared(vertx, "the-name", config);
    shared.write(KafkaProducerRecord.create("the_topic", "the_value"), ctx.asyncAssertSuccess(v -> {
      sentLatch.countDown();
    }));
    producers.add(shared);
  }
  sentLatch.awaitSuccess(10000);
  Async async = ctx.async();
  // All writes landed; now consume them back and close every shared handle.
  kafkaCluster.useTo().consumeStrings("the_topic", num, 10, TimeUnit.SECONDS, () -> {
    close(ctx, producers, async::complete);
  });
  async.awaitSuccess(10000);
  // Closing the last shared handle should tear down the producer network thread.
  waitUntil(() -> countThreads("kafka-producer-network-thread") == numKafkaProducerNetworkThread);
}
// Generated static helper: writes a record through a raw Object-typed producer and,
// on completion, converts the typed RecordMetadata into a Map<String, Object> view
// (via its JSON form) before invoking the caller's handler. A null metadata result
// is passed through as null. Returns the receiver for chaining.
// NOTE(review): the trailing `partitionsFor` declaration on this line is truncated
// at this chunk boundary — its body is not visible here and is left untouched.
public static io.vertx.kafka.client.producer.KafkaProducer<java.lang.Object,java.lang.Object> write(io.vertx.kafka.client.producer.KafkaProducer<Object, Object> j_receiver, io.vertx.kafka.client.producer.KafkaProducerRecord<java.lang.Object,java.lang.Object> record, io.vertx.core.Handler<io.vertx.core.AsyncResult<java.util.Map<String, Object>>> handler) { io.vertx.core.impl.ConversionHelper.fromObject(j_receiver.write(record, handler != null ? new io.vertx.core.Handler<io.vertx.core.AsyncResult<io.vertx.kafka.client.producer.RecordMetadata>>() { public void handle(io.vertx.core.AsyncResult<io.vertx.kafka.client.producer.RecordMetadata> ar) { handler.handle(ar.map(event -> event != null ? io.vertx.core.impl.ConversionHelper.fromJsonObject(event.toJson()) : null)); } } : null)); return j_receiver; } public static io.vertx.kafka.client.producer.KafkaProducer<java.lang.Object,java.lang.Object> partitionsFor(io.vertx.kafka.client.producer.KafkaProducer<Object, Object> j_receiver, java.lang.String topic, io.vertx.core.Handler<io.vertx.core.AsyncResult<java.util.List<java.util.Map<String, Object>>>> handler) {