@Override public void start() throws Exception { systemMBean = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class); // A random identifier String pid = UUID.randomUUID().toString(); // Get the kafka producer config JsonObject config = config(); // Create the producer producer = KafkaWriteStream.create(vertx, config.getMap(), String.class, JsonObject.class); // Publish the metircs in Kafka vertx.setPeriodic(1000, id -> { JsonObject metrics = new JsonObject(); metrics.put("CPU", systemMBean.getProcessCpuLoad()); metrics.put("Mem", systemMBean.getTotalPhysicalMemorySize() - systemMBean.getFreePhysicalMemorySize()); producer.write(new ProducerRecord<>("the_topic", new JsonObject().put(pid, metrics))); }); }
/**
 * Create a {@link KafkaWriteStream} wrapping an existing native Kafka producer.
 *
 * @param vertx Vert.x instance to use
 * @param producer the native Kafka producer to wrap
 * @return the write stream
 */
static <K, V> KafkaWriteStream<K, V> producer(Vertx vertx, Producer<K, V> producer) {
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, producer);
  return stream;
}
}
/**
 * Wrap a native Kafka producer as a {@link KafkaWriteStream}.
 *
 * @param vertx the Vert.x instance
 * @param producer the underlying Kafka producer
 * @return the resulting write stream
 */
static <K, V> KafkaWriteStream<K, V> producer(Vertx vertx, Producer<K, V> producer) {
  // Delegate directly to the stream factory
  return KafkaWriteStream.create(vertx, producer);
}
}
/**
 * Get or create a shared, typed KafkaProducer identified by {@code name}.
 *
 * @param vertx Vert.x instance to use
 * @param name the producer name to identify it
 * @param config Kafka producer configuration
 * @param keyType class type for the key serialization
 * @param valueType class type for the value serialization
 * @return an instance of the KafkaProducer
 */
public static <K, V> KafkaProducer<K, V> createShared(Vertx vertx, String name, Map<String, String> config, Class<K> keyType, Class<V> valueType) {
  // Snapshot the config so later caller-side mutation cannot affect the shared stream
  Supplier<KafkaWriteStream> factory = () -> KafkaWriteStream.create(vertx, new HashMap<>(config), keyType, valueType);
  return createShared(vertx, name, factory);
}
/**
 * Create a {@link KafkaWriteStream} with explicit key/value serialization types.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @param keyType class type for the key serialization
 * @param valueType class type for the value serialization
 * @return the write stream
 */
static <K, V> KafkaWriteStream<K, V> producer(Vertx vertx, Properties config, Class<K> keyType, Class<V> valueType) {
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, config, keyType, valueType);
  return stream;
}
/**
 * Create a {@link KafkaWriteStream} from a {@link Properties} configuration.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @return the write stream
 */
static <K, V> KafkaWriteStream<K, V> producer(Vertx vertx, Properties config) {
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, config);
  return stream;
}
/**
 * Get or create a shared KafkaProducer identified by {@code name}.
 *
 * @param vertx Vert.x instance to use
 * @param name the producer name to identify it
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
public static <K, V> KafkaProducer<K, V> createShared(Vertx vertx, String name, Map<String, String> config) {
  // Copy the config up front so the shared stream is isolated from caller mutation
  Supplier<KafkaWriteStream> factory = () -> KafkaWriteStream.create(vertx, new HashMap<>(config));
  return createShared(vertx, name, factory);
}
/**
 * Get or create a shared, typed KafkaProducer identified by {@code name}.
 *
 * NOTE(review): unlike the {@code Map}-based overloads, the {@link Properties}
 * config is captured without a defensive copy — confirm callers do not mutate
 * it after this call.
 *
 * @param vertx Vert.x instance to use
 * @param name the producer name to identify it
 * @param config Kafka producer configuration
 * @param keyType class type for the key serialization
 * @param valueType class type for the value serialization
 * @return an instance of the KafkaProducer
 */
public static <K, V> KafkaProducer<K, V> createShared(Vertx vertx, String name, Properties config, Class<K> keyType, Class<V> valueType) {
  Supplier<KafkaWriteStream> factory = () -> KafkaWriteStream.create(vertx, config, keyType, valueType);
  return createShared(vertx, name, factory);
}
/**
 * Build a {@link KafkaWriteStream} backed by the given producer configuration.
 *
 * @param vertx the Vert.x instance
 * @param config Kafka producer configuration
 * @return the resulting write stream
 */
static <K, V> KafkaWriteStream<K, V> producer(Vertx vertx, Properties config) {
  // Plain delegation to the stream factory
  return KafkaWriteStream.create(vertx, config);
}
/**
 * Get or create a shared KafkaProducer identified by {@code name}.
 *
 * @param vertx Vert.x instance to use
 * @param name the producer name to identify it
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
public static <K, V> KafkaProducer<K, V> createShared(Vertx vertx, String name, Properties config) {
  Supplier<KafkaWriteStream> factory = () -> KafkaWriteStream.create(vertx, config);
  return createShared(vertx, name, factory);
}
/**
 * Build a typed {@link KafkaWriteStream} from a {@link Properties} configuration.
 *
 * @param vertx the Vert.x instance
 * @param config Kafka producer configuration
 * @param keyType key serialization class
 * @param valueType value serialization class
 * @return the resulting write stream
 */
static <K, V> KafkaWriteStream<K, V> producer(Vertx vertx, Properties config, Class<K> keyType, Class<V> valueType) {
  // Plain delegation to the typed stream factory
  return KafkaWriteStream.create(vertx, config, keyType, valueType);
}
/**
 * Create a new KafkaProducer instance.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
static <K, V> KafkaProducer<K, V> create(Vertx vertx, Map<String, String> config) {
  // Copy the config so the stream owns its own map, then register a close hook
  // so the producer is closed when the owning verticle is undeployed
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, new HashMap<>(config));
  KafkaProducerImpl<K, V> wrapped = new KafkaProducerImpl<>(stream);
  return wrapped.registerCloseHook();
}
/**
 * Create a new KafkaProducer instance with explicit key/value serialization types.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @param keyType class type for the key serialization
 * @param valueType class type for the value serialization
 * @return an instance of the KafkaProducer
 */
static <K, V> KafkaProducer<K, V> create(Vertx vertx, Map<String, String> config, Class<K> keyType, Class<V> valueType) {
  // Defensive copy of the config, then wrap the typed stream with a close hook
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, new HashMap<>(config), keyType, valueType);
  KafkaProducerImpl<K, V> wrapped = new KafkaProducerImpl<>(stream);
  return wrapped.registerCloseHook();
}
/**
 * Create a new KafkaProducer instance from a native {@link Producer}.
 *
 * Unlike the config-based factories, no close hook is registered here: the
 * caller created the native producer and keeps responsibility for closing it.
 *
 * @param vertx Vert.x instance to use
 * @param producer the Kafka producer to wrap
 * @return an instance of the KafkaProducer
 */
@GenIgnore
static <K, V> KafkaProducer<K, V> create(Vertx vertx, Producer<K, V> producer) {
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, producer);
  return new KafkaProducerImpl<>(stream);
}
/**
 * Create a new KafkaProducer instance from a {@link Properties} configuration.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @return an instance of the KafkaProducer
 */
@GenIgnore
static <K, V> KafkaProducer<K, V> create(Vertx vertx, Properties config) {
  // Wrap the stream and register a close hook tied to the owning verticle
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, config);
  KafkaProducerImpl<K, V> wrapped = new KafkaProducerImpl<>(stream);
  return wrapped.registerCloseHook();
}
@Override public void start() throws Exception { systemMBean = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class); // A random identifier String pid = UUID.randomUUID().toString(); // Get the kafka producer config JsonObject config = config(); // Create the producer producer = KafkaWriteStream.create(vertx.getDelegate(), config.getMap(), String.class, JsonObject.class); // Publish the metircs in Kafka vertx.setPeriodic(1000, id -> { JsonObject metrics = new JsonObject(); metrics.put("CPU", systemMBean.getProcessCpuLoad()); metrics.put("Mem", systemMBean.getTotalPhysicalMemorySize() - systemMBean.getFreePhysicalMemorySize()); producer.write(new ProducerRecord<>("the_topic", new JsonObject().put(pid, metrics))); }); }
/**
 * Create a new KafkaProducer instance with explicit key/value serialization types.
 *
 * @param vertx Vert.x instance to use
 * @param config Kafka producer configuration
 * @param keyType class type for the key serialization
 * @param valueType class type for the value serialization
 * @return an instance of the KafkaProducer
 */
@GenIgnore
static <K, V> KafkaProducer<K, V> create(Vertx vertx, Properties config, Class<K> keyType, Class<V> valueType) {
  // Wrap the typed stream and register a close hook tied to the owning verticle
  KafkaWriteStream<K, V> stream = KafkaWriteStream.create(vertx, config, keyType, valueType);
  KafkaProducerImpl<K, V> wrapped = new KafkaProducerImpl<>(stream);
  return wrapped.registerCloseHook();
}
@Test
public void testBufferCodecString(TestContext ctx) throws Exception {
  // Round-trip Buffer keys/values through the Buffer serializer and deserializer
  testCodec(ctx, "testBufferCodecString",
    producerCfg -> {
      producerCfg.put("value.serializer", BufferSerializer.class);
      producerCfg.put("key.serializer", BufferSerializer.class);
      return KafkaWriteStream.create(vertx, producerCfg);
    },
    consumerCfg -> {
      consumerCfg.put("value.deserializer", BufferDeserializer.class);
      consumerCfg.put("key.deserializer", BufferDeserializer.class);
      return KafkaReadStream.create(vertx, consumerCfg);
    },
    idx -> Buffer.buffer("key-" + idx),
    idx -> Buffer.buffer("value-" + idx));
}
@Test
public void testBufferCodecString(TestContext ctx) throws Exception {
  // Verify Buffer-typed records survive a produce/consume cycle using the
  // project's Buffer serializer/deserializer pair
  testCodec(ctx, "testBufferCodecString",
    writeCfg -> {
      writeCfg.put("key.serializer", BufferSerializer.class);
      writeCfg.put("value.serializer", BufferSerializer.class);
      return KafkaWriteStream.create(vertx, writeCfg);
    },
    readCfg -> {
      readCfg.put("key.deserializer", BufferDeserializer.class);
      readCfg.put("value.deserializer", BufferDeserializer.class);
      return KafkaReadStream.create(vertx, readCfg);
    },
    n -> Buffer.buffer("key-" + n),
    n -> Buffer.buffer("value-" + n));
}
/**
 * Get or create the shared producer registered under {@code name} and hand out a
 * per-caller {@link KafkaProducer} facade over it.
 *
 * Each facade is tracked in the SharedProducer via an opaque identity key; the
 * underlying stream is only closed once the last facade for {@code name} is closed.
 *
 * @param vertx Vert.x instance to use
 * @param name name identifying the shared producer
 * @param streamFactory factory invoked only when no producer exists yet for {@code name}
 * @return a new facade over the (possibly pre-existing) shared producer
 */
private static <K, V> KafkaProducer<K, V> createShared(Vertx vertx, String name, Supplier<KafkaWriteStream> streamFactory) {
  // All registry reads/writes happen under the sharedProducers lock
  synchronized (sharedProducers) {
    SharedProducer sharedProducer = sharedProducers.computeIfAbsent(name, key -> {
      // First caller for this name: create the stream and hook its close
      // handler into the Vert.x shutdown sequence
      KafkaWriteStream stream = streamFactory.get();
      SharedProducer s = new SharedProducer(stream);
      s.closeHandler.registerCloseHook((VertxInternal) vertx);
      return s;
    });
    // Fresh object identity used as this caller's reference-count token
    Object key = new Object();
    KafkaProducerImpl<K, V> producer = new KafkaProducerImpl<>(KafkaWriteStream.create(vertx, sharedProducer.producer), new CloseHandler((timeout, ar) -> {
      // On facade close: drop this caller's token; the last one out removes
      // the registry entry and closes the shared stream for real
      synchronized (sharedProducers) {
        sharedProducer.remove(key);
        if (sharedProducer.isEmpty()) {
          sharedProducers.remove(name);
          sharedProducer.closeHandler.close(timeout, ar);
          return;
        }
      }
      // Other facades remain: report success without touching the shared stream.
      // ar is completed outside the lock to avoid running the handler while holding it
      ar.handle(Future.succeededFuture());
    }));
    sharedProducer.put(key, producer);
    return producer.registerCloseHook();
  }
}