// Exposes the backing counter's current count as a double-valued gauge reading.
@Override public double get() { return (double) counter.getCount(); } };
/** Marks the occurrence of {@code n} events by adding them to the backing counter. */
@Override
public void markEvent(long n) {
  counter.inc(n);
}
private void updateCounters(Iterable<MetricResult<Long>> counters) { for (MetricResult<Long> metricResult : counters) { String flinkMetricName = getFlinkMetricNameString(COUNTER_PREFIX, metricResult); Long update = metricResult.getAttempted(); // update flink metric Counter counter = flinkCounterCache.computeIfAbsent( flinkMetricName, n -> runtimeContext.getMetricGroup().counter(n)); counter.dec(counter.getCount()); counter.inc(update); } }
private void updateCounters(Iterable<MetricResult<Long>> counters) { for (MetricResult<Long> metricResult : counters) { String flinkMetricName = getFlinkMetricNameString(metricResult); Long update = metricResult.getAttempted(); // update flink metric Counter counter = flinkCounterCache.computeIfAbsent( flinkMetricName, n -> runtimeContext.getMetricGroup().counter(n)); counter.dec(counter.getCount()); counter.inc(update); } }
/** Marks the occurrence of a single event on the backing counter. */
@Override
public void markEvent() {
  this.counter.inc();
}
// Reports the backing counter's current count.
@Override public long getCount() { return counter.getCount(); } }
private void updateCounters(Iterable<MetricResult<Long>> counters) { for (MetricResult<Long> metricResult : counters) { String flinkMetricName = getFlinkMetricNameString(COUNTER_PREFIX, metricResult); Long update = metricResult.attempted(); // update flink metric Counter counter = flinkCounterCache.get(flinkMetricName); if (counter == null) { counter = runtimeContext.getMetricGroup().counter(flinkMetricName); flinkCounterCache.put(flinkMetricName, counter); } counter.dec(counter.getCount()); counter.inc(update); } }
/** Counts a successfully completed commit. */
@Override
public void onSuccess() {
  successfulCommits.inc();
}
/**
 * Visibility of this method must not be changed
 * since we deliberately do not map it to a JSON object in a Datadog-defined format.
 */
@Override public Number getMetricValue() { return counter.getCount(); } }
// Logs the failed async commit (with its cause) and increments the failure counter.
@Override public void onException(Throwable cause) { LOG.warn("Async Kafka commit failed.", cause); failedCommits.inc(); } };
// Advances the ring-buffer cursor, samples the counter, and recomputes the rate as
// (newest sample - oldest retained sample) / timeSpanInSeconds. After the advance,
// values[(time + 1) % values.length] is the oldest entry in the buffer.
// NOTE(review): the rate is only a per-second rate if update() is invoked once per
// second by its scheduler -- confirm the caller's update period.
@Override public void update() { time = (time + 1) % values.length; values[time] = counter.getCount(); currentRate = ((double) (values[time] - values[(time + 1) % values.length]) / timeSpanInSeconds); } }
/** Counts the record as emitted, then forwards it to the wrapped output. */
@Override
public void collect(StreamRecord<OUT> record) {
  numRecordsOut.inc();
  output.collect(record);
}
// Delegates to the wrapped counter's current count.
@Override public long getCount() { return this.counter.getCount(); } }
/** Counts the side-output record as emitted, then forwards it under its tag. */
@Override
public <X> void collect(OutputTag<X> outputTag, StreamRecord<X> record) {
  numRecordsOut.inc();
  output.collect(outputTag, record);
}
/** Returns the current count held by the backing counter. */
@Override
public long getCount() {
  return this.counter.getCount();
}
protected <X> void pushToOperator(StreamRecord<X> record) { try { // we know that the given outputTag matches our OutputTag so the record // must be of the type that our operator expects. @SuppressWarnings("unchecked") StreamRecord<T> castRecord = (StreamRecord<T>) record; numRecordsIn.inc(); operator.setKeyContextElement1(castRecord); operator.processElement(castRecord); } catch (Exception e) { throw new ExceptionInChainedOperatorException(e); } }
/** Sends the counter's current count under the given metric name. */
private void reportCounter(final String name, final Counter counter) {
  final long count = counter.getCount();
  send(name, Long.toString(count));
}
/**
 * Deep-copies the record (element copied via the serializer) before handing it to the
 * chained operator, so downstream mutation cannot leak back to other consumers.
 * A ClassCastException with a non-null outputTag is enriched with the tag id, since
 * identically-named OutputTags of different types are a common cause of that failure.
 */
@Override
protected <X> void pushToOperator(StreamRecord<X> record) {
  try {
    // we know that the given outputTag matches our OutputTag so the record
    // must be of the type that our operator (and Serializer) expects.
    @SuppressWarnings("unchecked")
    StreamRecord<T> castRecord = (StreamRecord<T>) record;
    numRecordsIn.inc();
    // Copy both the record wrapper and the element itself before processing.
    StreamRecord<T> copy = castRecord.copy(serializer.copy(castRecord.getValue()));
    operator.setKeyContextElement1(copy);
    operator.processElement(copy);
  } catch (ClassCastException e) {
    if (outputTag != null) {
      // Enrich error message
      ClassCastException replace =
          new ClassCastException(
              String.format(
                  "%s. Failed to push OutputTag with id '%s' to operator. "
                      + "This can occur when multiple OutputTags with different types "
                      + "but identical names are being used.",
                  e.getMessage(), outputTag.getId()));
      throw new ExceptionInChainedOperatorException(replace);
    } else {
      throw new ExceptionInChainedOperatorException(e);
    }
  } catch (Exception e) {
    throw new ExceptionInChainedOperatorException(e);
  }
} }
// Appends one "name: count" line per registered counter to the report builder.
// NOTE: the map is keyed by Counter with the metric name as the value, hence
// getValue() for the name and getKey() for the counter.
for (Map.Entry<Counter, String> metric : counters.entrySet()) { builder .append(metric.getValue()).append(": ").append(metric.getKey().getCount()) .append(lineSeparator);
// Record that one more input split has been fully processed.
completedSplitsCounter.inc();