/**
 * Counts a metric by delegating to the underlying metrics context.
 *
 * @param metricName name of the metric to increment
 * @param delta amount to add to the metric
 */
@Override
public void count(String metricName, int delta) {
  // Widen explicitly to the long-based increment API.
  metricsContext.increment(metricName, (long) delta);
}
/**
 * Counts a metric by delegating to the underlying metrics context.
 *
 * @param metricName name of the metric to increment
 * @param delta amount to add to the metric
 */
@Override
public void count(String metricName, int delta) {
  // Widen explicitly to the long-based increment API.
  metricsContext.increment(metricName, (long) delta);
}
@Override void doClose() { // Use compareAndSet to check if need to reduce weight. There will only be // one winner to proceed with the reduce weight call. if (needReduceWeight.compareAndSet(true, false)) { reduceWeight(); metricsContext.increment(METRICS_SCAN_REDUCE_WEIGHT, 1L); } } };
/**
 * Records a rate metric with the given occurrence count.
 *
 * @param metricName name of the metric
 * @param count number of occurrences to record
 */
@Override
public void rate(String metricName, int count) {
  // Resolve the current metrics context lazily, then record the count,
  // widening explicitly to the long-based increment API.
  metricsContext.get().increment(metricName, (long) count);
}
}
/**
 * Records a rate metric with the given occurrence count.
 *
 * @param metricName name of the metric
 * @param count number of occurrences to record
 */
@Override
public void rate(String metricName, int count) {
  // Resolve the current metrics context lazily, then record the count,
  // widening explicitly to the long-based increment API.
  metricsContext.get().increment(metricName, (long) count);
}
}
@Override void doClose() { // Use compareAndSet to check if need to reduce weight. There will only be // one winner to proceed with the reduce weight call. if (needReduceWeight.compareAndSet(true, false)) { reduceWeight(); metricsContext.increment(METRICS_SCAN_REDUCE_WEIGHT, 1L); } } };
/**
 * Records a single occurrence of the given rate metric.
 *
 * @param metricName name of the metric
 */
@Override
public void rate(String metricName) {
  // A rate call without an explicit count is a count of one.
  metricsContext.get().increment(metricName, 1L);
}
/**
 * Increments a metric namespaced under this collector's prefix.
 *
 * @param metricName unprefixed metric name
 * @param value amount to add
 */
@Override
public void increment(String metricName, long value) {
  // Build "<prefix>.<name>" with plain concatenation — equivalent to
  // String.format("%s.%s", ...) for strings, without the formatter overhead.
  metricsContext.increment(metricsPrefix + "." + metricName, value);
}
/**
 * Emits the program node-minutes metric for the current reporting interval.
 */
@VisibleForTesting
void emitMetric() {
  // Node-minutes = number of nodes * minutes in the interval
  // (multiplication is commutative; operand order is immaterial).
  metricsContext.increment(Constants.Metrics.Program.PROGRAM_NODE_MINUTES,
                           numNodes * intervalMinutes);
}
/**
 * Increments a metric namespaced under this collector's prefix.
 *
 * @param metricName unprefixed metric name
 * @param value amount to add
 */
@Override
public void increment(String metricName, long value) {
  // Build "<prefix>.<name>" with plain concatenation — equivalent to
  // String.format("%s.%s", ...) for strings, without the formatter overhead.
  metricsContext.increment(metricsPrefix + "." + metricName, value);
}
/**
 * Records a single occurrence of the given rate metric.
 *
 * @param metricName name of the metric
 */
@Override
public void rate(String metricName) {
  // A rate call without an explicit count is a count of one.
  metricsContext.get().increment(metricName, 1L);
}
/**
 * Emits the program node-minutes metric for the current reporting interval.
 */
@VisibleForTesting
void emitMetric() {
  // Node-minutes = number of nodes * minutes in the interval
  // (multiplication is commutative; operand order is immaterial).
  metricsContext.increment(Constants.Metrics.Program.PROGRAM_NODE_MINUTES,
                           numNodes * intervalMinutes);
}
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
  // If the executor is shutting down, drop the task silently — there is
  // nowhere left to run it.
  if (executor.isShutdown()) {
    return;
  }
  // Executor is saturated but alive: record the rejection, then fall back
  // to caller-runs semantics so the task is not lost.
  streamHandlerMetricsContext.increment("collect.async.reject", 1);
  r.run();
}
};
/**
 * Records a "request.received" metric before the handler method is invoked.
 *
 * @return always {@code true} so the request proceeds even if metrics fail
 */
@Override
public boolean preCall(HttpRequest request, HttpResponder responder, HandlerInfo handlerInfo) {
  if (metricsCollectionService == null) {
    // Metrics collection is disabled; nothing to record.
    return true;
  }
  try {
    MetricsContext collector = collectorCache.get(createContext(handlerInfo));
    collector.increment("request.received", 1);
  } catch (Throwable e) {
    // Deliberately broad best-effort catch: a metrics failure must never
    // block request handling.
    LOG.error("Got exception while getting collector", e);
  }
  return true;
}
@Override
public T apply(S source) {
  // Bookkeeping before decoding: count the dequeued event both under its
  // specific event metric and under the generic tuples-read metric.
  context.getQueueMetrics(queue).increment(eventsMetricsName, 1);
  context.getQueueMetrics(queue).increment("process.tuples.read", 1);
  if (producerAndQueue != null) {
    // One fewer event is now pending on the producer side.
    context.getProducerMetrics(producerAndQueue).increment("queue.pending", -1);
  }
  T decoded = inputDecoder.apply(source);
  return decoded;
}
};
/**
 * Records a "request.received" metric before the handler method is invoked.
 *
 * @return always {@code true} so the request proceeds even if metrics fail
 */
@Override
public boolean preCall(HttpRequest request, HttpResponder responder, HandlerInfo handlerInfo) {
  if (metricsCollectionService == null) {
    // Metrics collection is disabled; nothing to record.
    return true;
  }
  try {
    MetricsContext collector = collectorCache.get(createContext(handlerInfo));
    collector.increment("request.received", 1);
  } catch (Throwable e) {
    // Deliberately broad best-effort catch: a metrics failure must never
    // block request handling.
    LOG.error("Got exception while getting collector", e);
  }
  return true;
}
private void gaugeEventProcessed(QueueName inputQueueName) {
  // Tick events are counted separately from data events.
  if (processEntry.isTick()) {
    flowletContext.getProgramMetrics().increment("process.ticks.processed", processedCount);
    return;
  }
  // No source queue: attribute processed events to the flowlet-level metric.
  if (inputQueueName == null) {
    flowletContext.getProgramMetrics().increment("process.events.processed", processedCount);
    return;
  }
  // Otherwise attribute processed events to the specific input queue.
  queueMetricsCollectors.getUnchecked(inputQueueName.getSimpleName())
      .increment("process.events.processed", processedCount);
}
};
// Starts this flowlet: installs the logging context, gauges the running
// instance, builds the process driver, then runs the startup sequence.
@Override
protected void startUp() throws Exception {
  // Set the logging context first so all startup logs are attributed to
  // this flowlet.
  LoggingContextAccessor.setLoggingContext(flowletContext.getLoggingContext());
  // Gauge one running instance of this flowlet.
  flowletContext.getProgramMetrics().increment("process.instance", 1);
  flowletProcessDriver = new FlowletProcessDriver(flowletContext, dataFabricFacade, txCallback, processSpecs);
  // NOTE(review): statement order appears deliberate — the service hook is
  // started before the flowlet is initialized, and the process driver starts
  // only after initialization completes. Confirm before reordering.
  serviceHook.startAndWait();
  initFlowlet();
  flowletProcessDriver.startAndWait();
  LOG.info("Started Flowlet '{}' for Flow '{}'. Flowlet details: [{}]",
           flowletContext.getFlowletId(), flowletContext.getFlowId(), flowletContext);
}