/** * Builds and returns {@link KafkaReporter}. * * @param brokers string of Kafka brokers * @param topic topic to send metrics to * @return KafkaReporter */ public KafkaReporter build(String brokers, String topic, Properties props) throws IOException { this.brokers = brokers; this.topic = topic; // create a KafkaReporter with metrics.* and gobblin.kafka.sharedConfig.* keys return new KafkaReporter(this, KafkaReporter.getKafkaAndMetricsConfigFromProperties(props)); } }
/**
 * Constructs a {@link KafkaReporter}.
 *
 * Resolves the {@link SchemaVersionWriter} (from the {@code SCHEMA_VERSION_WRITER_TYPE}
 * config key, by alias or class name, falling back to {@link FixedSchemaVersionWriter}),
 * creates the serializer, and wires up the Kafka pusher.
 *
 * @param builder the builder holding brokers, topic, and an optional pre-built pusher
 * @param config reporter configuration
 * @throws IOException if the version writer cannot be instantiated
 */
protected KafkaReporter(Builder<?> builder, Config config) throws IOException {
  super(builder, config);

  SchemaVersionWriter versionWriter;
  if (config.hasPath(SCHEMA_VERSION_WRITER_TYPE)) {
    try {
      ClassAliasResolver<SchemaVersionWriter> resolver = new ClassAliasResolver<>(SchemaVersionWriter.class);
      Class<? extends SchemaVersionWriter> klazz = resolver.resolveClass(config.getString(SCHEMA_VERSION_WRITER_TYPE));
      // Use getDeclaredConstructor().newInstance() rather than the deprecated
      // Class.newInstance(), which propagates checked constructor exceptions undeclared.
      // Both failure modes remain ReflectiveOperationExceptions, so the catch is unchanged.
      versionWriter = klazz.getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException roe) {
      throw new IOException("Could not instantiate version writer.", roe);
    }
  } else {
    versionWriter = new FixedSchemaVersionWriter();
  }
  log.info("Schema version writer: " + versionWriter.getClass().getName());

  this.serializer = this.closer.register(createSerializer(versionWriter));

  // Prefer an explicitly supplied pusher; otherwise construct one from configuration.
  if (builder.kafkaPusher.isPresent()) {
    this.kafkaPusher = builder.kafkaPusher.get();
  } else {
    // metrics-reporting-specific Kafka keys take precedence over the shared Kafka config keys.
    Config kafkaConfig = ConfigUtils.getConfigOrEmpty(config, PusherUtils.METRICS_REPORTING_KAFKA_CONFIG_PREFIX)
        .withFallback(ConfigUtils.getConfigOrEmpty(config, ConfigurationKeys.SHARED_KAFKA_CONFIG_PREFIX));
    String pusherClassName = ConfigUtils.getString(config, PusherUtils.KAFKA_PUSHER_CLASS_NAME_KEY,
        PusherUtils.DEFAULT_KAFKA_PUSHER_CLASS_NAME);
    this.kafkaPusher = PusherUtils.getPusher(pusherClassName, builder.brokers, builder.topic, Optional.of(kafkaConfig));
  }
  // Register the pusher so it is closed together with the reporter.
  this.closer.register(this.kafkaPusher);
}
/** * Builds and returns {@link KafkaAvroReporter}. * * @param brokers string of Kafka brokers * @param topic topic to send metrics to * @return KafkaAvroReporter */ public KafkaAvroReporter build(String brokers, String topic, Properties props) throws IOException { this.brokers = brokers; this.topic = topic; // create a KafkaAvroReporter with metrics.* and gobblin.kafka.sharedConfig.* keys return new KafkaAvroReporter(this, KafkaReporter.getKafkaAndMetricsConfigFromProperties(props)); } }
/**
 * Verifies that a report emitted for a metric context carries the context's tag
 * plus the auto-generated context tags (3 in total).
 */
@Test
public void kafkaReporterContextTest() throws IOException {
  Tag<?> tag1 = new Tag<>("tag1", "value1");
  MetricContext context = MetricContext.builder("context").addTag(tag1).build();
  Counter counter = context.counter("com.linkedin.example.counter");

  MockKafkaPusher pusher = new MockKafkaPusher();
  KafkaReporter kafkaReporter = getBuilderFromContext(pusher).build("localhost:0000", "topic", new Properties());

  counter.inc();
  kafkaReporter.report(context);

  // Give the asynchronous report a moment to be pushed.
  try {
    Thread.sleep(1000);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }

  MetricReport metricReport = nextReport(pusher.messageIterator());

  // Fixed swapped arguments: the other assertions here (and TestNG's convention)
  // use assertEquals(actual, expected), but this call had them reversed.
  Assert.assertEquals(metricReport.getTags().size(), 3);
  Assert.assertTrue(metricReport.getTags().containsKey(tag1.getKey()));
  Assert.assertEquals(metricReport.getTags().get(tag1.getKey()), tag1.getValue().toString());
}
/** * Builds and returns {@link KafkaAvroReporter}. * * @param brokers string of Kafka brokers * @param topic topic to send metrics to * @return KafkaAvroReporter */ public KafkaAvroReporter build(String brokers, String topic, Properties props) throws IOException { this.brokers = brokers; this.topic = topic; // create a KafkaAvroReporter with metrics.* and gobblin.kafka.sharedConfig.* keys return new KafkaAvroReporter(this, KafkaReporter.getKafkaAndMetricsConfigFromProperties(props)); } }
/**
 * Verifies that tags supplied via the builder's {@code withTags} appear in the
 * emitted report alongside the auto-generated context tags (4 in total).
 */
@Test
public void kafkaReporterTagsTest() throws IOException {
  MetricContext metricContext =
      MetricContext.builder(this.getClass().getCanonicalName() + ".kafkaReporterTagsTest").build();
  Counter counter = metricContext.counter("com.linkedin.example.counter");

  Tag<?> tag1 = new Tag<>("tag1", "value1");
  Tag<?> tag2 = new Tag<>("tag2", 2);

  MockKafkaPusher pusher = new MockKafkaPusher();
  KafkaReporter kafkaReporter = getBuilder(pusher)
      .withTags(Lists.newArrayList(tag1, tag2))
      .build("localhost:0000", "topic", new Properties());

  counter.inc();
  kafkaReporter.report(metricContext);

  // Give the asynchronous report a moment to be pushed.
  try {
    Thread.sleep(1000);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }

  MetricReport metricReport = nextReport(pusher.messageIterator());

  // Fixed swapped arguments: the other assertions here (and TestNG's convention)
  // use assertEquals(actual, expected), but this call had them reversed.
  Assert.assertEquals(metricReport.getTags().size(), 4);
  Assert.assertTrue(metricReport.getTags().containsKey(tag1.getKey()));
  Assert.assertEquals(metricReport.getTags().get(tag1.getKey()), tag1.getValue().toString());
  Assert.assertTrue(metricReport.getTags().containsKey(tag2.getKey()));
  Assert.assertEquals(metricReport.getTags().get(tag2.getKey()), tag2.getValue().toString());
}
/** * Builds and returns {@link KafkaReporter}. * * @param brokers string of Kafka brokers * @param topic topic to send metrics to * @return KafkaReporter */ public KafkaReporter build(String brokers, String topic, Properties props) throws IOException { this.brokers = brokers; this.topic = topic; // create a KafkaReporter with metrics.* and gobblin.kafka.sharedConfig.* keys return new KafkaReporter(this, KafkaReporter.getKafkaAndMetricsConfigFromProperties(props)); } }
/**
 * Constructs a {@link KafkaReporter}.
 *
 * Resolves the {@link SchemaVersionWriter} (from the {@code SCHEMA_VERSION_WRITER_TYPE}
 * config key, by alias or class name, falling back to {@link FixedSchemaVersionWriter}),
 * creates the serializer, and wires up the Kafka pusher.
 *
 * @param builder the builder holding brokers, topic, and an optional pre-built pusher
 * @param config reporter configuration
 * @throws IOException if the version writer cannot be instantiated
 */
protected KafkaReporter(Builder<?> builder, Config config) throws IOException {
  super(builder, config);

  SchemaVersionWriter versionWriter;
  if (config.hasPath(SCHEMA_VERSION_WRITER_TYPE)) {
    try {
      ClassAliasResolver<SchemaVersionWriter> resolver = new ClassAliasResolver<>(SchemaVersionWriter.class);
      Class<? extends SchemaVersionWriter> klazz = resolver.resolveClass(config.getString(SCHEMA_VERSION_WRITER_TYPE));
      // Use getDeclaredConstructor().newInstance() rather than the deprecated
      // Class.newInstance(), which propagates checked constructor exceptions undeclared.
      // Both failure modes remain ReflectiveOperationExceptions, so the catch is unchanged.
      versionWriter = klazz.getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException roe) {
      throw new IOException("Could not instantiate version writer.", roe);
    }
  } else {
    versionWriter = new FixedSchemaVersionWriter();
  }
  log.info("Schema version writer: " + versionWriter.getClass().getName());

  this.serializer = this.closer.register(createSerializer(versionWriter));

  // Prefer an explicitly supplied pusher; otherwise construct one from configuration.
  if (builder.kafkaPusher.isPresent()) {
    this.kafkaPusher = builder.kafkaPusher.get();
  } else {
    // metrics-reporting-specific Kafka keys take precedence over the shared Kafka config keys.
    Config kafkaConfig = ConfigUtils.getConfigOrEmpty(config, PusherUtils.METRICS_REPORTING_KAFKA_CONFIG_PREFIX)
        .withFallback(ConfigUtils.getConfigOrEmpty(config, ConfigurationKeys.SHARED_KAFKA_CONFIG_PREFIX));
    String pusherClassName = ConfigUtils.getString(config, PusherUtils.KAFKA_PUSHER_CLASS_NAME_KEY,
        PusherUtils.DEFAULT_KAFKA_PUSHER_CLASS_NAME);
    this.kafkaPusher = PusherUtils.getPusher(pusherClassName, builder.brokers, builder.topic, Optional.of(kafkaConfig));
  }
  // Register the pusher so it is closed together with the reporter.
  this.closer.register(this.kafkaPusher);
}