// Records the given long value as a gauge on the wrapped metrics context.
// Plain pass-through: no prefixing or transformation of the metric name.
@Override public void gauge(String metricName, long value) {
  metricsContext.gauge(metricName, value);
} } // NOTE(review): trailing '}' closes an enclosing (anonymous) class opened outside this chunk
// Records the given long value as a gauge on the wrapped metrics context.
// Plain pass-through: no prefixing or transformation of the metric name.
@Override public void gauge(String metricName, long value) {
  metricsContext.gauge(metricName, value);
} } // NOTE(review): trailing '}' closes an enclosing (anonymous) class opened outside this chunk
/**
 * Emits container, memory and vcore usage gauges for the program identified by the
 * given metrics tag context.
 *
 * @param context metrics tags identifying the program whose resources are reported
 * @param containers number of containers currently in use
 * @param memory memory usage value to report
 * @param vcores virtual core usage value to report
 */
protected void sendMetrics(Map<String, String> context, int containers, int memory, int vcores) {
  LOG.trace("Reporting resources: (containers, memory, vcores) = ({}, {}, {})",
            containers, memory, vcores);
  // Collector is cached per tag context; getUnchecked avoids a checked ExecutionException.
  MetricsContext collector = programMetricsCollectors.getUnchecked(context);
  collector.gauge(METRIC_CONTAINERS, containers);
  collector.gauge(METRIC_MEMORY_USAGE, memory);
  collector.gauge(METRIC_VIRTUAL_CORE_USAGE, vcores);
}
/**
 * Emits container, memory and vcore usage gauges for the program identified by the
 * given metrics tag context.
 *
 * @param context metrics tags identifying the program whose resources are reported
 * @param containers number of containers currently in use
 * @param memory memory usage value to report
 * @param vcores virtual core usage value to report
 */
protected void sendMetrics(Map<String, String> context, int containers, int memory, int vcores) {
  LOG.trace("Reporting resources: (containers, memory, vcores) = ({}, {}, {})",
            containers, memory, vcores);
  // Collector is cached per tag context; getUnchecked avoids a checked ExecutionException.
  MetricsContext collector = programMetricsCollectors.getUnchecked(context);
  collector.gauge(METRIC_CONTAINERS, containers);
  collector.gauge(METRIC_MEMORY_USAGE, memory);
  collector.gauge(METRIC_VIRTUAL_CORE_USAGE, vcores);
}
// Records a histogram observation. The underlying CDAP metrics system has no real
// histogram support yet, so the raw value is emitted as a gauge instead.
@Override public void histogram(String metricName, int value) {
  // TODO: change when CDAP metrics supports histograms: CDAP-3120
  metricsContext.get().gauge(metricName, value);
}
// Records a histogram observation. The underlying CDAP metrics system has no real
// histogram support yet, so the raw value is emitted as a gauge instead.
@Override public void histogram(String metricName, int value) {
  // TODO: change when CDAP metrics supports histograms: CDAP-3120
  metricsContext.get().gauge(metricName, value);
}
/**
 * Records the value as a gauge, namespacing the metric name under the configured
 * prefix ("prefix.name") before delegating to the wrapped metrics context.
 */
@Override
public void gauge(String metricName, long value) {
  String prefixedName = String.format("%s.%s", metricsPrefix, metricName);
  metricsContext.gauge(prefixedName, value);
}
} // NOTE(review): closes an enclosing (anonymous) class opened outside this chunk
/**
 * Records the value as a gauge, namespacing the metric name under the configured
 * prefix ("prefix.name") before delegating to the wrapped metrics context.
 */
@Override
public void gauge(String metricName, long value) {
  String prefixedName = String.format("%s.%s", metricsPrefix, metricName);
  metricsContext.gauge(prefixedName, value);
}
} // NOTE(review): closes an enclosing (anonymous) class opened outside this chunk
// Records the value as a gauge on the underlying metrics context.
// NOTE(review): the 'tags' varargs parameter is silently dropped here — confirm the
// underlying MetricsContext genuinely has no tag support; otherwise callers' tags
// are being lost without warning.
@Override public void gauge(String metricName, int value, String...tags) {
  metricsContext.get().gauge(metricName, value);
}
// Records the value as a gauge on the underlying metrics context.
// NOTE(review): the 'tags' varargs parameter is silently dropped here — confirm the
// underlying MetricsContext genuinely has no tag support; otherwise callers' tags
// are being lost without warning.
@Override public void gauge(String metricName, int value, String...tags) {
  metricsContext.get().gauge(metricName, value);
}
/**
 * Publishes the active configuration values as gauges so the effective settings
 * are observable through the metrics system.
 */
private void emitConfigMetrics() {
  MetricsContext ctx = metricsContext;
  ctx.gauge("max.buffer.size", config.getMaxBufferSize());
  ctx.gauge("event.delay.millis", config.getEventDelayMillis());
  ctx.gauge("kafka.fetch.buffer.size", config.getKafkaFetchBufferSize());
  ctx.gauge("checkpoint.interval.millis", config.getCheckpointIntervalMillis());
}
/**
 * Publishes the active configuration values as gauges so the effective settings
 * are observable through the metrics system.
 */
private void emitConfigMetrics() {
  MetricsContext ctx = metricsContext;
  ctx.gauge("max.buffer.size", config.getMaxBufferSize());
  ctx.gauge("event.delay.millis", config.getEventDelayMillis());
  ctx.gauge("kafka.fetch.buffer.size", config.getKafkaFetchBufferSize());
  ctx.gauge("checkpoint.interval.millis", config.getCheckpointIntervalMillis());
}
// Cache loader: hands out a HandlerTaskExecutor for the given thread, reusing a
// pooled one when available.
@Override public HandlerTaskExecutor load(Thread key) throws Exception {
  HandlerTaskExecutor executor = handlerExecutorPool.poll();
  if (executor == null) {
    // Pool is empty: create a fresh executor. The pool-size gauge is not emitted
    // here because nothing was removed from the pool.
    return createTaskExecutor(instantiatorFactory);
  }
  // Took one from the pool: decrement the tracked size and report the new value.
  programMetricsContext.gauge("context.pool.size", handlerExecutorSize.decrementAndGet());
  return executor;
} }); // NOTE(review): closes the anonymous CacheLoader and its enclosing call, opened outside this chunk
// Cache loader: hands out a HandlerTaskExecutor for the given thread, reusing a
// pooled one when available.
@Override public HandlerTaskExecutor load(Thread key) throws Exception {
  HandlerTaskExecutor executor = handlerExecutorPool.poll();
  if (executor == null) {
    // Pool is empty: create a fresh executor. The pool-size gauge is not emitted
    // here because nothing was removed from the pool.
    return createTaskExecutor(instantiatorFactory);
  }
  // Took one from the pool: decrement the tracked size and report the new value.
  programMetricsContext.gauge("context.pool.size", handlerExecutorSize.decrementAndGet());
  return executor;
} }); // NOTE(review): closes the anonymous CacheLoader and its enclosing call, opened outside this chunk
/**
 * Publishes the task's completion percentage and each of its task counters as
 * gauges on the metrics context.
 */
public void reportMetrics() {
  // Progress is a fraction in [0, 1]; report it as a whole-number percentage.
  long completionPercent = (long) (taskContext.getProgress() * 100);
  metricsContext.gauge(MapReduceMetrics.METRIC_TASK_COMPLETION, completionPercent);
  for (Map.Entry<String, TaskCounter> entry : getTaskCounters().entrySet()) {
    metricsContext.gauge(entry.getKey(), getTaskCounter(entry.getValue()));
  }
}
/**
 * Publishes the task's completion percentage and each of its task counters as
 * gauges on the metrics context.
 */
public void reportMetrics() {
  // Progress is a fraction in [0, 1]; report it as a whole-number percentage.
  long completionPercent = (long) (taskContext.getProgress() * 100);
  metricsContext.gauge(MapReduceMetrics.METRIC_TASK_COMPLETION, completionPercent);
  for (Map.Entry<String, TaskCounter> entry : getTaskCounters().entrySet()) {
    metricsContext.gauge(entry.getKey(), getTaskCounter(entry.getValue()));
  }
}
// Emits a "dataset.size.mb" gauge for each user dataset whose underlying LevelDB
// table appears in the given stats map. System-namespace tables are skipped.
// NOTE(review): the result of dsFramework.getDatasetSpec(...) is dereferenced without
// a null check — confirm it cannot return null for specs listed by getInstances().
private void report(Map<TableId, LevelDBTableService.TableStats> datasetStat) throws DatasetManagementException {
  for (Map.Entry<TableId, LevelDBTableService.TableStats> statEntry : datasetStat.entrySet()) {
    String namespace = statEntry.getKey().getNamespace();
    // emit metrics for only user datasets, tables in system namespace are ignored
    if (NamespaceId.SYSTEM.getNamespace().equals(namespace)) {
      continue;
    }
    String tableName = statEntry.getKey().getTableName();
    Collection<DatasetSpecificationSummary> instances = dsFramework.getInstances(new NamespaceId(namespace));
    for (DatasetSpecificationSummary spec : instances) {
      DatasetSpecification specification = dsFramework.getDatasetSpec(new DatasetId(namespace, spec.getName()));
      // Attribute this table's size to the dataset instance that declares it as a parent.
      if (specification.isParent(tableName)) {
        MetricsContext collector = metricsService.getContext(ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, namespace, Constants.Metrics.Tag.DATASET, spec.getName()));
        // Convert bytes to whole megabytes; integer truncation toward zero.
        int sizeInMb = (int) (statEntry.getValue().getDiskSizeBytes() / BYTES_IN_MB);
        collector.gauge("dataset.size.mb", sizeInMb);
        break;
      }
    }
  }
}
// Emits a "dataset.size.mb" gauge for each user dataset whose underlying LevelDB
// table appears in the given stats map. System-namespace tables are skipped.
// NOTE(review): the result of dsFramework.getDatasetSpec(...) is dereferenced without
// a null check — confirm it cannot return null for specs listed by getInstances().
private void report(Map<TableId, LevelDBTableService.TableStats> datasetStat) throws DatasetManagementException {
  for (Map.Entry<TableId, LevelDBTableService.TableStats> statEntry : datasetStat.entrySet()) {
    String namespace = statEntry.getKey().getNamespace();
    // emit metrics for only user datasets, tables in system namespace are ignored
    if (NamespaceId.SYSTEM.getNamespace().equals(namespace)) {
      continue;
    }
    String tableName = statEntry.getKey().getTableName();
    Collection<DatasetSpecificationSummary> instances = dsFramework.getInstances(new NamespaceId(namespace));
    for (DatasetSpecificationSummary spec : instances) {
      DatasetSpecification specification = dsFramework.getDatasetSpec(new DatasetId(namespace, spec.getName()));
      // Attribute this table's size to the dataset instance that declares it as a parent.
      if (specification.isParent(tableName)) {
        MetricsContext collector = metricsService.getContext(ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, namespace, Constants.Metrics.Tag.DATASET, spec.getName()));
        // Convert bytes to whole megabytes; integer truncation toward zero.
        int sizeInMb = (int) (statEntry.getValue().getDiskSizeBytes() / BYTES_IN_MB);
        collector.gauge("dataset.size.mb", sizeInMb);
        break;
      }
    }
  }
}