/**
 * Builds the standard JMX MBean name for the given metric, in the format
 * {@code domainName:type=metricType,key1=val1,key2=val2}.
 *
 * @param prefix     JMX domain name placed before the colon
 * @param metricName metric whose group becomes the type and whose tags become key/value pairs
 * @return the assembled MBean name
 */
static String getMBeanName(String prefix, MetricName metricName) {
    StringBuilder name = new StringBuilder(prefix).append(":type=").append(metricName.group());
    metricName.tags().forEach((key, value) -> {
        // Tags with an empty key or value would produce a malformed MBean name; skip them.
        if (!key.isEmpty() && !value.isEmpty()) {
            name.append(",").append(key).append("=").append(Sanitizer.jmxSanitize(value));
        }
    });
    return name.toString();
}
/**
 * Check if a kafkaMetric is an interested metric.
 *
 * @param metricName the Kafka metric name to examine
 * @return true if the metric's group, name, type tag and tags identify an interested metric
 */
public static boolean isInterested(org.apache.kafka.common.MetricName metricName) {
    Map<String, String> tags = metricName.tags();
    // Delegate to the (group, name, type, tags) overload with the pieces unpacked.
    return isInterested(metricName.group(), metricName.name(), tags.get(TYPE_KEY), tags);
}
/**
 * Convert a KafkaMetric to a CruiseControlMetric.
 *
 * @param kafkaMetric the raw Kafka metric to convert
 * @param now         timestamp to stamp on the converted metric
 * @param brokerId    broker the metric was collected from
 * @return the converted metric, never null
 * @throws IllegalArgumentException if the metric has no Cruise Control equivalent
 */
public static CruiseControlMetric toCruiseControlMetric(KafkaMetric kafkaMetric, long now, int brokerId) {
    org.apache.kafka.common.MetricName metricName = kafkaMetric.metricName();
    CruiseControlMetric converted =
            toCruiseControlMetric(now, brokerId, metricName.name(), metricName.tags(), kafkaMetric.value());
    if (converted != null) {
        return converted;
    }
    throw new IllegalArgumentException(String.format(
            "Cannot convert KafkaMetric %s to a Cruise Control metric for broker %d at time %d",
            kafkaMetric.metricName(), brokerId, now));
}
MetricName inheritedMetric = inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, childTagsWithValues);
Map<String, String> filledOutTags = inheritedMetric.tags();
// JUnit's assertEquals(message, expected, actual) takes the expected value second; the
// original call passed the actual tag lookup as "expected", which inverts failure messages.
assertEquals("parent-tag should be set properly", "parent-tag-value", filledOutTags.get("parent-tag"));
assertEquals("child-tag should be set properly", "child-tag-value", filledOutTags.get("child-tag"));
/**
 * Verifies that every sender metric registered at runtime (except the built-in
 * kafka-metrics-count group) has a matching template in {@code SenderMetricsRegistry}.
 */
@Test
public void testSenderMetricsTemplates() throws Exception {
    // Recreate the metrics with a client-id tag so registered names carry the tag set
    // the templates are declared with.
    metrics.close();
    Map<String, String> clientTags = Collections.singletonMap("client-id", "clientA");
    metrics = new Metrics(new MetricConfig().tags(clientTags));
    SenderMetricsRegistry metricsRegistry = new SenderMetricsRegistry(metrics);
    Sender sender = new Sender(logContext, client, metadata, this.accumulator, false, MAX_REQUEST_SIZE, ACKS_ALL, 1,
            metricsRegistry, time, REQUEST_TIMEOUT, 50, null, apiVersions);
    // Append a message so that topic metrics are created
    accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT);
    sender.run(time.milliseconds()); // connect
    sender.run(time.milliseconds()); // send produce request
    client.respond(produceResponse(tp0, 0, Errors.NONE, 0));
    sender.run(time.milliseconds());
    // Create throttle time metrics
    Sender.throttleTimeSensor(metricsRegistry);
    // Verify that all metrics except metrics-count have registered templates
    Set<MetricNameTemplate> allMetrics = new HashSet<>();
    for (MetricName n : metrics.metrics().keySet()) {
        if (!n.group().equals("kafka-metrics-count"))
            allMetrics.add(new MetricNameTemplate(n.name(), n.group(), "", n.tags().keySet()));
    }
    TestUtils.checkEquals(allMetrics, new HashSet<>(metricsRegistry.allTemplates()), "metrics", "templates");
}
/**
 * Verifies that every fetcher metric registered at runtime (except the built-in
 * kafka-metrics-count group) has a matching template in {@code FetcherMetricsRegistry}.
 */
@Test
public void testFetcherMetricsTemplates() throws Exception {
    // Recreate the metrics with a client-id tag so registered names carry the tag set
    // the templates are declared with.
    metrics.close();
    Map<String, String> clientTags = Collections.singletonMap("client-id", "clientA");
    metrics = new Metrics(new MetricConfig().tags(clientTags));
    metricsRegistry = new FetcherMetricsRegistry(clientTags.keySet(), "consumer" + groupId);
    fetcher.close();
    fetcher = createFetcher(subscriptions, metrics);
    // Fetch from topic to generate topic metrics
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    assertEquals(1, fetcher.sendFetches());
    client.prepareResponse(fullFetchResponse(tp0, this.records, Errors.NONE, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords = fetcher.fetchedRecords();
    assertTrue(partitionRecords.containsKey(tp0));
    // Create throttle metrics
    Fetcher.throttleTimeSensor(metrics, metricsRegistry);
    // Verify that all metrics except metrics-count have registered templates
    Set<MetricNameTemplate> allMetrics = new HashSet<>();
    for (MetricName n : metrics.metrics().keySet()) {
        // Normalize topic/partition-specific names back to their template placeholders.
        String name = n.name().replaceAll(tp0.toString(), "{topic}-{partition}");
        if (!n.group().equals("kafka-metrics-count"))
            allMetrics.add(new MetricNameTemplate(name, n.group(), "", n.tags().keySet()));
    }
    TestUtils.checkEquals(allMetrics, new HashSet<>(metricsRegistry.getAllTemplates()), "metrics", "templates");
}
/**
 * Exposes the listener consumer's metrics keyed by its client id.
 *
 * @return a single-entry map of client id to the consumer's metrics, or an empty
 *         map if the listener consumer is not running or publishes no metrics
 */
@Override
public Map<String, Map<MetricName, ? extends Metric>> metrics() {
    ListenerConsumer current = this.listenerConsumer; // snapshot the field once
    if (current == null) {
        return Collections.emptyMap();
    }
    Map<MetricName, ? extends Metric> consumerMetrics = current.consumer.metrics();
    Iterator<MetricName> names = consumerMetrics.keySet().iterator();
    if (!names.hasNext()) {
        return Collections.emptyMap();
    }
    // Every metric of one consumer carries the same client-id tag; read it off the first.
    String clientId = names.next().tags().get("client-id");
    return Collections.singletonMap(clientId, consumerMetrics);
}
/**
 * Samples the "assigned-partitions" metric from one registered consumer and records
 * the partition count per client id as a data point.
 */
private synchronized void checkAssignments() {
    logger.debug("Checking partition assignments");
    try {
        KafkaConsumer<String, String> consumer = consumers.iterator().next();
        Map<MetricName, ? extends Metric> metrics = consumer.metrics();
        for (MetricName name : metrics.keySet()) {
            if ("assigned-partitions".equals(name.name())) {
                Metric metric = metrics.get(name);
                Map<String, String> tags = name.tags();
                String clientId = tags.get("client-id");
                int partitionCount = ((Double) metric.metricValue()).intValue();
                processDataPoint(clientId, partitionCount, Instant.now());
            }
        }
    } catch (NoSuchElementException ignored) {
        // iterator().next() threw because no consumers are registered yet; this check is
        // best-effort, so log and try again on the next invocation instead of failing.
        logger.debug("No consumers registered yet; skipping assignment check");
    }
}
/**
 * Determine if the supplied metric name is part of this group identifier.
 *
 * @param metricName the metric name
 * @return true if the metric name's group and tags match this group identifier, or false otherwise
 */
public boolean includes(MetricName metricName) {
    if (metricName == null) {
        return false;
    }
    return groupName.equals(metricName.group()) && tags.equals(metricName.tags());
}
public boolean matches(MetricName name, KafkaMetric metric) { Boolean excluded = pattern.matcher(name.name()).matches(); // Collect client aggregated metrics only. So we need to exclude topic-level or broker-level metrics, including: // 1. exclude metrics whose `tags` contain "topic" and "node-id". // 2. exclude metrics whose MetricName format is '{topic}-{partition}.xxx', like test-1.records-lag-avg. Boolean topicLevelMetrics = name.tags().containsKey("topic") || name.tags().containsKey("node-id") || name.name().contains("."); return !excluded && !topicLevelMetrics; } }
/**
 * Check if a kafkaMetric is an interested metric.
 *
 * @param metricName the Kafka metric name to examine
 * @return true if the metric is one we track
 */
public static boolean isInterested(org.apache.kafka.common.MetricName metricName) {
    // Pull the type tag out, then hand everything to the detailed overload.
    String type = metricName.tags().get(TYPE_KEY);
    return isInterested(metricName.group(), metricName.name(), type, metricName.tags());
}
/**
 * Builds the standard JMX MBean name in the format
 * {@code domainName:type=metricType,key1=val1,key2=val2}.
 *
 * @param metricName metric whose group supplies the type and whose tags supply the key/value pairs
 * @return the assembled MBean name
 */
private String getMBeanName(MetricName metricName) {
    StringBuilder mBeanName = new StringBuilder();
    mBeanName.append(prefix);
    mBeanName.append(":type=");
    mBeanName.append(metricName.group());
    for (Map.Entry<String, String> entry : metricName.tags().entrySet()) {
        // Tags with an empty key or value would yield a malformed MBean name; skip them.
        // (isEmpty() replaces the unidiomatic length() <= 0 checks; length is never negative.)
        if (entry.getKey().isEmpty() || entry.getValue().isEmpty()) {
            continue;
        }
        mBeanName.append(",");
        mBeanName.append(entry.getKey());
        mBeanName.append("=");
        // NOTE(review): tag values are appended without JMX quoting/sanitization — confirm
        // tag values can never contain characters that are illegal in an ObjectName.
        mBeanName.append(entry.getValue());
    }
    return mBeanName.toString();
}
/**
 * Derives a Dropwizard metric name of the form
 * {@code METRIC_PREFIX.group.tagValue1...tagValueN.name} from a Kafka metric.
 */
private static String dropwizardMetricName(KafkaMetric kafkaMetric) {
    MetricName name = kafkaMetric.metricName();
    List<String> nameParts = new ArrayList<String>(2);
    nameParts.add(name.group());
    nameParts.addAll(name.tags().values());
    nameParts.add(name.name());
    StringBuilder builder = new StringBuilder();
    for (String namePart : nameParts) {
        builder.append(namePart);
        builder.append(".");
    }
    builder.setLength(builder.length() - 1); // Remove the trailing dot.
    // NOTE(review): String.replace takes literal text, not a regex, so replace("\\.", "_")
    // only rewrites a literal backslash-dot sequence and is almost certainly a no-op here.
    // If the intent was to escape dots inside individual name parts, that would have to
    // happen per part before joining — confirm intended behavior before changing.
    String processedName = builder.toString().replace(' ', '_').replace("\\.", "_");
    return MetricRegistry.name(METRIC_PREFIX, processedName);
}
/**
 * Registers a per-broker latency gauge for the given node, matching producer-node
 * metrics with the requested name and the node's id tag.
 */
private void registerLatencyPerBrokerGauge(Producer<byte[], byte[]> producer,
                                           HermesMetrics metrics,
                                           String metricName,
                                           String producerName,
                                           Node node) {
    String gaugeName = Gauges.JMX_PREFIX + "." + producerName + "-" + metricName + "." + escapeDots(node.host());
    String nodeTag = "node-" + node.id(); // computed once instead of per predicate call
    registerGauge(producer, metrics, gaugeName, entry ->
            entry.getKey().group().equals("producer-node-metrics")
                    && entry.getKey().name().equals(metricName)
                    && entry.getKey().tags().containsValue(nodeTag));
}
// "empty" appears to be a placeholder so the group.id field is always present in the
// message — TODO confirm against the consumer of metricMsg.
metricMsg.put("group.id", "empty");
// Flatten each metric tag into the message under a "tag."-prefixed key.
for (Map.Entry<String, String> tag : metricName.tags().entrySet()) {
    metricMsg.put("tag." + tag.getKey(), tag.getValue());
metric -> {
    // Build a dotted metric path starting with "Kafka.", appending tag values in
    // key-sorted order (TreeMap) so the resulting name is deterministic.
    final StringBuilder sb = new StringBuilder("Kafka.");
    for (final Map.Entry<String, String> tag : new TreeMap<>(metric.tags()).entrySet()) {
        // Skip tags with an empty key or value.
        if (!tag.getKey().isEmpty() && !tag.getValue().isEmpty()) {
            sb.append(tag.getValue()).append('.');
/**
 * Convert a KafkaMetric to a CruiseControlMetric.
 *
 * @throws IllegalArgumentException if the metric has no Cruise Control equivalent
 */
public static CruiseControlMetric toCruiseControlMetric(KafkaMetric kafkaMetric, long now, int brokerId) {
    org.apache.kafka.common.MetricName metricName = kafkaMetric.metricName();
    CruiseControlMetric ccm = toCruiseControlMetric(now, brokerId, metricName.name(), metricName.tags(),
                                                    kafkaMetric.value());
    if (ccm == null) {
        String message = String.format(
                "Cannot convert KafkaMetric %s to a Cruise Control metric for broker %d at time %d",
                kafkaMetric.metricName(), brokerId, now);
        throw new IllegalArgumentException(message);
    }
    return ccm;
}
/**
 * Registers a Kafka metric as a Servo double gauge, tagging it with its group
 * name and all of its own tags.
 */
private void addMetric(KafkaMetric metric) {
    MetricName metricName = metric.metricName();
    MonitorConfig.Builder configBuilder = MonitorConfig.builder(metricName.name())
            .withTag("group", metricName.group());
    // Copy every metric tag onto the monitor config.
    metricName.tags().forEach(configBuilder::withTag);
    gauges.put(Servo.getDoubleGauge(configBuilder.build()), metric);
}
// Start from a copy of the fixed tags, then add the metric's group and its own tags;
// per-metric tags overwrite any same-named fixed tags because they are put last.
Map<String, String> tags = new HashMap<String, String>(fixedTags);
tags.put("group", m.getKey().group());
for (Map.Entry<String, String> tag : m.getValue().metricName().tags().entrySet()) {
    tags.put(tag.getKey(), tag.getValue());