/** Builds the Metrics registry under test, with a JMX reporter attached and metric expiration enabled. */
@Before
public void setup() {
    // Single JMX reporter; the cast selects the List<MetricsReporter> overload.
    List<MetricsReporter> reporters = Arrays.asList((MetricsReporter) new JmxReporter());
    this.metrics = new Metrics(config, reporters, time, true);
}
_offlineRunnables = new ConcurrentHashMap<>(); List<MetricsReporter> reporters = new ArrayList<>(); reporters.add(new JmxReporter(JMX_PREFIX)); Metrics metrics = new Metrics(new MetricConfig(), reporters, new SystemTime()); metrics.addMetric(metrics.metricName("offline-runnable-count", METRIC_GROUP_NAME, "The number of Service/App that are not fully running"),
public static void initialize() { final MetricConfig metricConfig = new MetricConfig() .samples(100) .timeWindow( 1000, TimeUnit.MILLISECONDS ); final List<MetricsReporter> reporters = new ArrayList<>(); reporters.add(new JmxReporter("io.confluent.ksql.metrics")); // Replace all static contents other than Time to ensure they are cleaned for tests that are // not aware of the need to initialize/cleanup this test, in case test processes are reused. // Tests aware of the class clean everything up properly to get the state into a clean state, // a full, fresh instantiation here ensures something like KsqlEngineMetricsTest running after // another test that used MetricsCollector without running cleanUp will behave correctly. metrics = new Metrics(metricConfig, reporters, new SystemTime()); collectorMap = new ConcurrentHashMap<>(); }
reporters.add(new JmxReporter(JMX_PREFIX)); Metrics metrics = new Metrics(metricConfig, reporters, new SystemTime()); Map<String, String> tags = new HashMap<>();
/** Prepares a small two-sample, 50-event-window Metrics registry with mock time and expiration enabled. */
@Before
public void setup() {
    config = new MetricConfig().eventWindow(50).samples(2);
    time = new MockTime();

    // Cast picks the List<MetricsReporter> constructor; true enables sensor expiration.
    List<MetricsReporter> reporters = Arrays.asList((MetricsReporter) new JmxReporter());
    metrics = new Metrics(config, reporters, time, true);
}
reporters.add(new JmxReporter(JMX_PREFIX)); Metrics metrics = new Metrics(metricConfig, reporters, new SystemTime()); Map<String, String> tags = new HashMap<>();
childTagsWithValues.put("child-tag", "child-tag-value"); try (Metrics inherited = new Metrics(new MetricConfig().tags(parentTagsWithValues), Arrays.asList((MetricsReporter) new JmxReporter()), time, true)) { MetricName inheritedMetric = inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, childTagsWithValues);
MBeanServer server = ManagementFactory.getPlatformMBeanServer(); try { JmxReporter reporter = new JmxReporter(); metrics.addReporter(reporter);
MBeanServer server = ManagementFactory.getPlatformMBeanServer(); try { metrics.addReporter(new JmxReporter());
/**
 * Verifies that metrics can no longer be added to a sensor once its
 * inactivity-expiration window has elapsed.
 *
 * <p>Fix: {@code metrics.close()} previously ran only on the success path; if any
 * assertion failed, the Metrics instance (and its JMX-registered beans) leaked
 * into subsequent tests. The close is now in a {@code finally} block.
 */
@Test
public void testExpiredSensor() {
    final MetricConfig config = new MetricConfig();
    final Time mockTime = new MockTime();
    final Metrics metrics =
        new Metrics(config, Arrays.asList((MetricsReporter) new JmxReporter()), mockTime, true);
    try {
        final long inactiveSensorExpirationTimeSeconds = 60L;
        final Sensor sensor = new Sensor(metrics, "sensor", null, config, mockTime,
            inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel.INFO);

        // While the sensor is fresh, adds succeed.
        assertTrue(sensor.add(metrics.metricName("test1", "grp1"), new Avg()));

        final Map<String, String> emptyTags = Collections.emptyMap();
        final MetricName rateMetricName = new MetricName("rate", "test", "", emptyTags);
        final MetricName totalMetricName = new MetricName("total", "test", "", emptyTags);
        final Meter meter = new Meter(rateMetricName, totalMetricName);
        assertTrue(sensor.add(meter));

        // Advance mock time just past the expiration window; the sensor is now
        // considered expired and must reject further adds.
        mockTime.sleep(TimeUnit.SECONDS.toMillis(inactiveSensorExpirationTimeSeconds + 1));
        assertFalse(sensor.add(metrics.metricName("test3", "grp1"), new Avg()));
        assertFalse(sensor.add(meter));
    } finally {
        // Always unregister reporters/JMX beans, even when an assertion above fails.
        metrics.close();
    }
}
MetricsReporter.class, Collections.singletonMap(ProducerConfig.CLIENT_ID_CONFIG, clientId)); reporters.add(new JmxReporter(JMX_PREFIX)); this.metrics = new Metrics(metricConfig, reporters, time); this.partitioner = config.getConfiguredInstance(ProducerConfig.PARTITIONER_CLASS_CONFIG, Partitioner.class);
/**
 * Builds a default Metrics registry with a JMX reporter and one sensor
 * carrying a count metric and a sum metric.
 */
@Before
public void setup() throws Exception {
    metrics = new Metrics();
    metrics.addReporter(new JmxReporter());

    sensor = metrics.sensor("kafka.requests");

    countMetricName = metrics.metricName("pack.bean1.count", "grp1");
    sensor.add(countMetricName, new Count());

    sumMetricName = metrics.metricName("pack.bean1.sum", "grp1");
    sensor.add(sumMetricName, new Sum());
}
List<MetricsReporter> reporters = config.getConfiguredInstances(ConsumerConfig.METRIC_REPORTER_CLASSES_CONFIG, MetricsReporter.class, Collections.singletonMap(ConsumerConfig.CLIENT_ID_CONFIG, clientId)); reporters.add(new JmxReporter(JMX_PREFIX)); this.metrics = new Metrics(metricConfig, reporters, time); this.retryBackoffMs = config.getLong(ConsumerConfig.RETRY_BACKOFF_MS_CONFIG);
.recordLevel(Sensor.RecordingLevel.forName(config.getString(AdminClientConfig.METRICS_RECORDING_LEVEL_CONFIG))) .tags(metricTags); reporters.add(new JmxReporter(JMX_PREFIX)); metrics = new Metrics(metricConfig, reporters, time); String metricGrpPrefix = "admin-client";
public AbstractMirusJmxReporter() { Metrics metrics = new Metrics(); metrics.addReporter(new JmxReporter("mirus")); this.metrics = metrics; }
/**
 * Builds the Connect worker's metrics registry.
 *
 * @param workerId              id of the worker the metrics belong to
 * @param time                  clock used by the registry
 * @param numSamples            number of samples kept per metric
 * @param sampleWindowMs        length of each sample window, in milliseconds
 * @param metricsRecordingLevel recording-level name parsed via {@code Sensor.RecordingLevel.forName}
 * @param reporters             configured reporters; a JMX reporter is appended to this list
 */
public ConnectMetrics(String workerId, Time time, int numSamples, long sampleWindowMs,
                      String metricsRecordingLevel, List<MetricsReporter> reporters) {
    this.workerId = workerId;
    this.time = time;

    final MetricConfig metricConfig = new MetricConfig()
        .samples(numSamples)
        .timeWindow(sampleWindowMs, TimeUnit.MILLISECONDS)
        .recordLevel(Sensor.RecordingLevel.forName(metricsRecordingLevel));

    // NOTE: appends to the caller-supplied list so JMX reporting is always enabled.
    reporters.add(new JmxReporter(JMX_PREFIX));
    this.metrics = new Metrics(metricConfig, reporters, time);

    LOG.debug("Registering Connect metrics with JMX for worker '{}'", workerId);
    AppInfoParser.registerAppInfo(JMX_PREFIX, workerId, metrics);
}
// Core constructor: wires up metrics, config-derived settings, and (de)serializers.
// NOTE(review): order matters here — config is read before logUnused() reports leftovers.
private KafkaConsumer(ConsumerConfig config, ConsumerRebalanceCallback callback, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
    log.trace("Starting the Kafka consumer");
    subscribedTopics = new HashSet<String>();
    subscribedPartitions = new HashSet<TopicPartition>();
    // Fixed single JMX reporter under the "kafka.consumer." prefix; the cast selects the
    // List<MetricsReporter> constructor overload.
    this.metrics = new Metrics(new MetricConfig(), Collections.singletonList((MetricsReporter) new JmxReporter("kafka.consumer.")), new SystemTime());
    this.metadataFetchTimeoutMs = config.getLong(ConsumerConfig.METADATA_FETCH_TIMEOUT_CONFIG);
    this.totalMemorySize = config.getLong(ConsumerConfig.TOTAL_BUFFER_MEMORY_CONFIG);
    // Parsed but not stored here — presumably consumed by code outside this view; TODO confirm.
    List<InetSocketAddress> addresses = ClientUtils.parseAndValidateAddresses(config.getList(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG));
    // Explicit deserializer arguments win; otherwise fall back to the configured class names.
    if (keyDeserializer == null)
        this.keyDeserializer = config.getConfiguredInstance(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, Deserializer.class);
    else
        this.keyDeserializer = keyDeserializer;
    if (valueDeserializer == null)
        this.valueDeserializer = config.getConfiguredInstance(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Deserializer.class);
    else
        this.valueDeserializer = valueDeserializer;
    // Log any supplied config keys that were never read, to surface typos/misconfiguration.
    config.logUnused();
    log.debug("Kafka consumer started");
}
.tags(metricsTags); List<MetricsReporter> reporters = config.getConfiguredInstances(CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG, MetricsReporter.class); reporters.add(new JmxReporter(JMX_PREFIX)); this.metrics = new Metrics(metricConfig, reporters, time); this.retryBackoffMs = config.getLong(CommonClientConfigs.RETRY_BACKOFF_MS_CONFIG);
MetricsReporter.class, Collections.singletonMap(CommonClientConfigs.CLIENT_ID_CONFIG, clientId)); reporters.add(new JmxReporter(JMX_PREFIX)); this.metrics = new Metrics(metricConfig, reporters, time); this.retryBackoffMs = config.getLong(CommonClientConfigs.RETRY_BACKOFF_MS_CONFIG);
List<MetricsReporter> reporters = config.getConfiguredInstances(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG, MetricsReporter.class); reporters.add(new JmxReporter(jmxPrefix)); this.metrics = new Metrics(metricConfig, reporters, time); this.partitioner = new Partitioner();