/**
 * Removes the attribute backing {@code metric} from the mbean registered under
 * {@code mBeanName}, if one exists.
 *
 * @param metric the metric whose attribute should be removed
 * @param mBeanName the registry key of the owning mbean
 * @return the affected mbean, or {@code null} if no mbean is registered under that name
 */
private KafkaMbean removeAttribute(KafkaMetric metric, String mBeanName) {
    KafkaMbean registered = this.mbeans.get(mBeanName);
    if (registered != null) {
        registered.removeAttribute(metric.metricName().name());
    }
    return registered;
}
/**
 * Looks up the first registered {@link KafkaMetric} whose name matches {@code name}.
 *
 * @param name the metric name to search for
 * @return the first matching metric
 * @throws Exception if no metric with the given name is registered
 */
private KafkaMetric getMetric(String name) throws Exception {
    // Use Optional.orElseThrow instead of the !isPresent()/get() anti-pattern.
    return metrics.metrics().entrySet().stream()
        .filter(entry -> entry.getKey().name().equals(name))
        .map(Map.Entry::getValue)
        .findFirst()
        .orElseThrow(() -> new Exception(String.format("Could not find metric called %s", name)));
}
}
/**
 * Registers {@code metric} as an attribute on the mbean derived from its metric name,
 * creating and caching the mbean on first use.
 *
 * @param metric the metric to expose as an mbean attribute
 * @return the mbean now holding the attribute
 * @throws KafkaException if attribute registration fails with a JMException
 */
private KafkaMbean addAttribute(KafkaMetric metric) {
    try {
        MetricName metricName = metric.metricName();
        String beanName = getMBeanName(prefix, metricName);
        // Single lookup plus null-check instead of containsKey/put/get.
        KafkaMbean bean = this.mbeans.get(beanName);
        if (bean == null) {
            bean = new KafkaMbean(beanName);
            this.mbeans.put(beanName, bean);
        }
        bean.setAttribute(metricName.name(), metric);
        return bean;
    } catch (JMException e) {
        throw new KafkaException("Error creating mbean attribute for metricName :" + metric.metricName(), e);
    }
}
/**
 * Convenience overload: reads the attribute whose attribute name equals the
 * metric's own name.
 *
 * @param metricName the metric identifying both the mbean and the attribute
 * @return the attribute value
 * @throws Exception if the attribute cannot be read
 */
private Object getAttribute(MetricName metricName) throws Exception {
    String attributeName = metricName.name();
    return getAttribute(metricName, attributeName);
}
/**
 * Returns the value of the first registered metric whose name matches {@code name}.
 *
 * @param name the metric name to look up
 * @return the metric's current value as a double
 * @throws IllegalStateException if no metric with that name is registered
 */
public double metricValue(String name) {
    return metrics.metrics().entrySet().stream()
        .filter(e -> e.getKey().name().equals(name))
        .findFirst()
        .map(e -> (double) e.getValue().metricValue())
        .orElseThrow(() -> new IllegalStateException(
            "Metric not found, " + name + ", found=" + metrics.metrics().keySet()));
}
/**
 * Snapshots this metric as a {@code Stat} built from the metric's name, its current
 * value, and {@code lastEvent} — presumably the timestamp of the most recent
 * recorded event; TODO confirm against the enclosing class.
 */
Stat asStat() {
    return new Stat(metric.metricName().name(), value(), lastEvent);
}
}
/**
 * Checks whether the given Kafka metric is one this reporter cares about, by
 * delegating to the (group, name, type, tags) overload. The type is taken from
 * the metric's {@code TYPE_KEY} tag.
 *
 * @param metricName the fully-qualified Kafka metric name
 * @return {@code true} if the metric is of interest
 */
public static boolean isInterested(org.apache.kafka.common.MetricName metricName) {
    return isInterested(metricName.group(),
                        metricName.name(),
                        metricName.tags().get(TYPE_KEY),
                        metricName.tags());
}
/**
 * Records two values on the sensor, then requests attributes by a mix of known
 * names and one unknown name ("name"): only the two known attributes are returned.
 */
@Test
public void testGetAttributesWithUnknown() throws Exception {
    sensor.record(3.5);
    sensor.record(4.0);
    // "name" is not a registered attribute; it should be omitted from the result.
    AttributeList attributeList = getAttributes(countMetricName, countMetricName.name(), sumMetricName.name(), "name");
    List<Attribute> attributes = attributeList.asList();
    assertEquals(2, attributes.size());
    for (Attribute attribute : attributes) {
        if (countMetricName.name().equals(attribute.getName()))
            assertEquals(2.0, attribute.getValue()); // two record() calls -> count of 2
        else if (sumMetricName.name().equals(attribute.getName()))
            assertEquals(7.5, attribute.getValue()); // 3.5 + 4.0
        else
            fail("Unexpected attribute returned: " + attribute.getName());
    }
}
/**
 * Convert a KafkaMetric to a CruiseControlMetric
 *
 * @param kafkaMetric the raw Kafka metric to convert
 * @param now the timestamp to stamp on the resulting metric
 * @param brokerId the id of the broker the metric was collected from
 * @return the converted metric; never {@code null}
 * @throws IllegalArgumentException if the metric cannot be mapped to a Cruise Control metric
 */
public static CruiseControlMetric toCruiseControlMetric(KafkaMetric kafkaMetric, long now, int brokerId) {
    org.apache.kafka.common.MetricName metricName = kafkaMetric.metricName();
    // NOTE(review): KafkaMetric.value() is deprecated in newer Kafka clients in favor of
    // metricValue(); confirm the client version in use before migrating.
    CruiseControlMetric ccm = toCruiseControlMetric(now, brokerId, metricName.name(), metricName.tags(), kafkaMetric.value());
    if (ccm == null) {
        throw new IllegalArgumentException(String.format("Cannot convert KafkaMetric %s to a Cruise Control metric for "
            + "broker %d at time %d", kafkaMetric.metricName(), brokerId, now));
    }
    return ccm;
}
/**
 * Records two values on the sensor, then verifies that requesting the count and sum
 * attributes by name returns exactly those two attributes with the expected values.
 */
@Test
public void testGetAttributes() throws Exception {
    sensor.record(3.5);
    sensor.record(4.0);
    AttributeList attributeList = getAttributes(countMetricName, countMetricName.name(), sumMetricName.name());
    List<Attribute> attributes = attributeList.asList();
    assertEquals(2, attributes.size());
    for (Attribute attribute : attributes) {
        if (countMetricName.name().equals(attribute.getName()))
            assertEquals(2.0, attribute.getValue()); // two record() calls -> count of 2
        else if (sumMetricName.name().equals(attribute.getName()))
            assertEquals(7.5, attribute.getValue()); // 3.5 + 4.0
        else
            fail("Unexpected attribute returned: " + attribute.getName());
    }
}
final MetricGroup kafkaMetricGroup = getRuntimeContext().getMetricGroup().addGroup("KafkaProducer"); for (Map.Entry<MetricName, ? extends Metric> entry: metrics.entrySet()) { String name = entry.getKey().name(); Metric metric = entry.getValue();
final MetricGroup kafkaMetricGroup = getRuntimeContext().getMetricGroup().addGroup("KafkaProducer"); for (Map.Entry<MetricName, ? extends Metric> entry: metrics.entrySet()) { String name = entry.getKey().name(); Metric metric = entry.getValue();
final MetricGroup kafkaMetricGroup = getRuntimeContext().getMetricGroup().addGroup("KafkaProducer"); for (Map.Entry<MetricName, ? extends Metric> metric: metrics.entrySet()) { kafkaMetricGroup.gauge(metric.getKey().name(), new KafkaMetricWrapper(metric.getValue()));
/**
 * Drives a full produce round-trip so that all sender/topic metrics are created, then
 * verifies every registered metric (except the kafka-metrics-count group) has a
 * corresponding template in the SenderMetricsRegistry.
 */
@Test
public void testSenderMetricsTemplates() throws Exception {
    metrics.close();
    Map<String, String> clientTags = Collections.singletonMap("client-id", "clientA");
    metrics = new Metrics(new MetricConfig().tags(clientTags));
    SenderMetricsRegistry metricsRegistry = new SenderMetricsRegistry(metrics);
    Sender sender = new Sender(logContext, client, metadata, this.accumulator, false, MAX_REQUEST_SIZE, ACKS_ALL, 1,
        metricsRegistry, time, REQUEST_TIMEOUT, 50, null, apiVersions);
    // Append a message so that topic metrics are created
    accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT);
    sender.run(time.milliseconds()); // connect
    sender.run(time.milliseconds()); // send produce request
    client.respond(produceResponse(tp0, 0, Errors.NONE, 0));
    sender.run(time.milliseconds());
    // Create throttle time metrics
    Sender.throttleTimeSensor(metricsRegistry);
    // Verify that all metrics except metrics-count have registered templates
    Set<MetricNameTemplate> allMetrics = new HashSet<>();
    for (MetricName n : metrics.metrics().keySet()) {
        if (!n.group().equals("kafka-metrics-count"))
            allMetrics.add(new MetricNameTemplate(n.name(), n.group(), "", n.tags().keySet()));
    }
    TestUtils.checkEquals(allMetrics, new HashSet<>(metricsRegistry.allTemplates()), "metrics", "templates");
}
/**
 * Performs a full fetch round-trip so that all fetcher/topic metrics are created, then
 * verifies every registered metric (except the kafka-metrics-count group) has a
 * corresponding template in the FetcherMetricsRegistry, after normalizing
 * topic-partition-specific names back to their "{topic}-{partition}" template form.
 */
@Test
public void testFetcherMetricsTemplates() throws Exception {
    metrics.close();
    Map<String, String> clientTags = Collections.singletonMap("client-id", "clientA");
    metrics = new Metrics(new MetricConfig().tags(clientTags));
    metricsRegistry = new FetcherMetricsRegistry(clientTags.keySet(), "consumer" + groupId);
    fetcher.close();
    fetcher = createFetcher(subscriptions, metrics);
    // Fetch from topic to generate topic metrics
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, fetcher.sendFetches());
    client.prepareResponse(fullFetchResponse(tp0, this.records, Errors.NONE, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords = fetcher.fetchedRecords();
    assertTrue(partitionRecords.containsKey(tp0));
    // Create throttle metrics
    Fetcher.throttleTimeSensor(metrics, metricsRegistry);
    // Verify that all metrics except metrics-count have registered templates
    Set<MetricNameTemplate> allMetrics = new HashSet<>();
    for (MetricName n : metrics.metrics().keySet()) {
        String name = n.name().replaceAll(tp0.toString(), "{topic}-{partition}");
        if (!n.group().equals("kafka-metrics-count"))
            allMetrics.add(new MetricNameTemplate(name, n.group(), "", n.tags().keySet()));
    }
    TestUtils.checkEquals(allMetrics, new HashSet<>(metricsRegistry.getAllTemplates()), "metrics", "templates");
}
/**
 * Inspects the first consumer's "assigned-partitions" metric and records one data
 * point per client id with that client's current partition count.
 *
 * <p>A no-op when no consumer has been registered yet.
 */
private synchronized void checkAssignments() {
    logger.debug("Checking partition assignments");
    try {
        KafkaConsumer<String, String> consumer = consumers.iterator().next();
        Map<MetricName, ? extends Metric> metrics = consumer.metrics();
        for (MetricName name : metrics.keySet()) {
            if ("assigned-partitions".equals(name.name())) {
                Metric metric = metrics.get(name);
                Map<String, String> tags = name.tags();
                String clientId = tags.get("client-id");
                int partitionCount = ((Double) metric.metricValue()).intValue();
                processDataPoint(clientId, partitionCount, Instant.now());
            }
        }
    } catch (NoSuchElementException ignored) {
        // iterator().next() failed because no consumers are registered yet;
        // intentionally best-effort — skip this cycle rather than fail.
        logger.debug("No consumers registered yet; skipping assignment check");
    }
}
/**
 * Registers a JMX gauge for the given producer-node metric of a single broker,
 * matching metrics from the "producer-node-metrics" group whose tags reference
 * the broker's node id.
 *
 * @param producer the producer whose metrics are inspected
 * @param metrics the Hermes metrics registry to register the gauge in
 * @param metricName the Kafka producer metric to expose
 * @param producerName logical producer name used in the gauge path
 * @param node the broker node the gauge is scoped to
 */
private void registerLatencyPerBrokerGauge(Producer<byte[], byte[]> producer,
                                           HermesMetrics metrics,
                                           String metricName,
                                           String producerName,
                                           Node node) {
    String gaugeName = Gauges.JMX_PREFIX + "." + producerName + "-" + metricName + "." + escapeDots(node.host());
    String nodeTag = "node-" + node.id();
    registerGauge(producer, metrics, gaugeName,
        entry -> entry.getKey().group().equals("producer-node-metrics")
            && entry.getKey().name().equals(metricName)
            && entry.getKey().tags().containsValue(nodeTag));
}