@StartBundle
public void startBundle() {
  // Record a fixed sample (10) at bundle start — presumably a fixture value the
  // enclosing test asserts on; confirm against the test's expected distribution.
  bundleDist.update(10L);
}
/**
 * Records {@code value} into the distribution metric named {@code name} under the
 * Feast metrics namespace ({@code FeastMetrics.FEAST_NAMESPACE}).
 *
 * @param name  metric name within the Feast namespace
 * @param value sample to add to the distribution
 */
public static void update(String name, long value) {
  Metrics.distribution(FeastMetrics.FEAST_NAMESPACE, name).update(value);
}
@ProcessElement public void processElement(@Element String element, OutputReceiver<String> receiver) { lineLenDist.update(element.length()); if (element.trim().isEmpty()) { emptyLines.inc(); } // Split the line into words. String[] words = element.split(ExampleUtils.TOKENIZER_PATTERN, -1); // Output each word encountered into the output PCollection. for (String word : words) { if (!word.isEmpty()) { receiver.output(word); } } } }
/**
 * Adds {@code value} to this distribution via the metrics container attached to the
 * current execution context; silently drops the sample when no container is present.
 */
@Override
public void update(long value) {
  MetricsContainer container = MetricsEnvironment.getCurrentContainer();
  if (container == null) {
    return; // No active container (e.g. outside a pipeline step) — nothing to record.
  }
  container.getDistribution(name).update(value);
}
/** Verifies that Distribution.update is routed to the cell of the current metrics container. */
@Test
public void testDistributionToCell() {
  MetricsContainer mockContainer = Mockito.mock(MetricsContainer.class);
  Distribution mockDistribution = Mockito.mock(Distribution.class);
  when(mockContainer.getDistribution(METRIC_NAME)).thenReturn(mockDistribution);
  Distribution distribution = Metrics.distribution(NS, NAME);
  // Install the mocked container, then verify each update is forwarded to its cell.
  MetricsEnvironment.setCurrentContainer(mockContainer);
  distribution.update(5L);
  verify(mockDistribution).update(5L);
  // Subsequent updates should keep hitting the same mocked cell.
  distribution.update(36L);
  distribution.update(1L);
  verify(mockDistribution).update(36L);
  verify(mockDistribution).update(1L);
}
@DoFn.FinishBundle
public void finishBundle() {
  // Record a fixed sample (40) at bundle completion — presumably a fixture value the
  // enclosing test asserts on; confirm against the test's expected distribution.
  bundleDist.update(40L);
}
}))
@ProcessElement
public void processElement(ProcessContext c) {
  // Each element is a boxed long sample folded into the time distribution.
  timeDistribution.update(c.element().longValue());
}
}
/**
 * Streams reads for one shard, emitting each response's alignment list and recording
 * per-shard timing and count metrics.
 *
 * @throws IOException on stream/read failure
 * @throws GeneralSecurityException on authentication failure
 */
@ProcessElement
public void processElement(ProcessContext c) throws IOException, GeneralSecurityException {
  Metrics.counter(RetrieveReads.class, "Initialized Shard Count").inc();
  Stopwatch timer = Stopwatch.createStarted();
  // Iterate the read stream for this shard, clipped to the shard boundary.
  for (Iterator<StreamReadsResponse> responses =
          ReadStreamIterator.enforceShardBoundary(auth, c.element(), shardBoundary, fields);
      responses.hasNext(); ) {
    c.output(responses.next().getAlignmentsList());
  }
  timer.stop();
  Metrics.distribution(RetrieveReads.class, "Shard Processing Time (sec)")
      .update(timer.elapsed(TimeUnit.SECONDS));
  Metrics.counter(RetrieveReads.class, "Finished Shard Count").inc();
}
}
/**
 * Reads one shard from the streaming API, emitting each response's alignments and
 * tracking shard-level timing and count metrics.
 *
 * @throws IOException on stream/read failure
 * @throws GeneralSecurityException on authentication failure
 */
@ProcessElement
public void processElement(ProcessContext c) throws IOException, GeneralSecurityException {
  Metrics.counter(RetrieveReads.class, "Initialized Shard Count").inc();
  Stopwatch shardTimer = Stopwatch.createStarted();
  Iterator<StreamReadsResponse> responseIterator =
      ReadStreamIterator.enforceShardBoundary(auth, c.element(), shardBoundary, fields);
  while (responseIterator.hasNext()) {
    StreamReadsResponse response = responseIterator.next();
    c.output(response.getAlignmentsList());
  }
  shardTimer.stop();
  Metrics.distribution(RetrieveReads.class, "Shard Processing Time (sec)")
      .update(shardTimer.elapsed(TimeUnit.SECONDS));
  Metrics.counter(RetrieveReads.class, "Finished Shard Count").inc();
}
}
/**
 * Streams variants for one shard, emitting each response's variant list and recording
 * per-shard timing/count metrics plus running summary statistics in the worker log.
 *
 * @throws IOException on stream failure
 * @throws GeneralSecurityException on authentication failure
 * @throws InterruptedException if the stream iterator is interrupted
 */
@ProcessElement
public void processElement(ProcessContext c)
    throws IOException, GeneralSecurityException, InterruptedException {
  Metrics.counter(RetrieveVariants.class, "Initialized Shard Count").inc();
  Stopwatch timer = Stopwatch.createStarted();
  Iterator<StreamVariantsResponse> responses =
      VariantStreamIterator.enforceShardBoundary(auth, c.element(), shardBoundary, fields);
  while (responses.hasNext()) {
    c.output(responses.next().getVariantsList());
  }
  timer.stop();
  // elapsed() is stable once the stopwatch is stopped, so read it a single time.
  final long shardSeconds = timer.elapsed(TimeUnit.SECONDS);
  Metrics.distribution(RetrieveVariants.class, "Shard Processing Time (sec)").update(shardSeconds);
  Metrics.counter(RetrieveVariants.class, "Finished Shard Count").inc();
  stats.addValue(shardSeconds);
  LOG.info("Shard Duration in Seconds - Min: " + stats.getMin() + " Max: " + stats.getMax()
      + " Avg: " + stats.getMean() + " StdDev: " + stats.getStandardDeviation());
}
}
/**
 * Processes one variant shard: streams its responses, outputs each variant list, and
 * updates shard timing/count metrics and the shared duration statistics (logged per shard).
 *
 * @throws IOException on stream failure
 * @throws GeneralSecurityException on authentication failure
 * @throws InterruptedException if the stream iterator is interrupted
 */
@ProcessElement
public void processElement(ProcessContext c)
    throws IOException, GeneralSecurityException, InterruptedException {
  Metrics.counter(RetrieveVariants.class, "Initialized Shard Count").inc();
  Stopwatch shardTimer = Stopwatch.createStarted();
  // Drain the bounded variant stream for this shard.
  for (Iterator<StreamVariantsResponse> it =
          VariantStreamIterator.enforceShardBoundary(auth, c.element(), shardBoundary, fields);
      it.hasNext(); ) {
    c.output(it.next().getVariantsList());
  }
  shardTimer.stop();
  Metrics.distribution(RetrieveVariants.class, "Shard Processing Time (sec)")
      .update(shardTimer.elapsed(TimeUnit.SECONDS));
  Metrics.counter(RetrieveVariants.class, "Finished Shard Count").inc();
  stats.addValue(shardTimer.elapsed(TimeUnit.SECONDS));
  LOG.info("Shard Duration in Seconds - Min: " + stats.getMin() + " Max: " + stats.getMax()
      + " Avg: " + stats.getMean() + " StdDev: " + stats.getStandardDeviation());
}
}
/**
 * Closes the BAM writer for this bundle, records write metrics, and emits both the
 * shard name and its (sequenceIndex, bytesWritten) size record into the window.
 *
 * @throws IOException if closing the underlying writer fails
 */
@FinishBundle
public void finishBundle(DoFn<Read, String>.FinishBundleContext c) throws IOException {
  bw.close();
  // NOTE(review): despite the "Maximum" prefix, this records this shard's elapsed time;
  // presumably the distribution's max aggregation yields the per-pipeline maximum.
  Metrics.distribution(WriteBAMFn.class, "Maximum Write Shard Processing Time (sec)")
      .update(stopWatch.elapsed(TimeUnit.SECONDS));
  LOG.info("Finished writing " + shardContig);
  Metrics.counter(WriteBAMFn.class, "Finished Write Shard Count").inc();
  // Bytes persisted, per the truncation-excluding accessor on the output stream.
  final long bytesWritten = ts.getBytesWrittenExceptingTruncation();
  LOG.info("Wrote " + readCount + " reads, " + unmappedReadCount + " unmapped, into " + shardName
      + (hadOutOfOrder ? "ignored out of order" : "") + ", wrote " + bytesWritten + " bytes");
  Metrics.counter(WriteBAMFn.class, "Written reads").inc(readCount);
  Metrics.counter(WriteBAMFn.class, "Written unmapped reads").inc(unmappedReadCount);
  // Widen before adding to avoid int overflow on very large shards.
  final long totalReadCount = (long) readCount + (long) unmappedReadCount;
  Metrics.distribution(WriteBAMFn.class, "Maximum Reads Per Shard").update(totalReadCount);
  c.output(shardName, window.maxTimestamp(), window);
  c.output(SEQUENCE_SHARD_SIZES_TAG, KV.of(sequenceIndex, bytesWritten),
      window.maxTimestamp(), window);
}
/**
 * Finalizes this bundle's BAM shard: closes the writer, updates timing/size/count
 * metrics, and outputs the shard name and its size record for downstream assembly.
 *
 * @throws IOException if closing the underlying writer fails
 */
@FinishBundle
public void finishBundle(DoFn<Read, String>.FinishBundleContext c) throws IOException {
  bw.close();
  // Record total elapsed time for writing this shard (stopwatch started elsewhere).
  Metrics.distribution(WriteBAMFn.class, "Maximum Write Shard Processing Time (sec)")
      .update(stopWatch.elapsed(TimeUnit.SECONDS));
  LOG.info("Finished writing " + shardContig);
  Metrics.counter(WriteBAMFn.class, "Finished Write Shard Count").inc();
  final long bytesWritten = ts.getBytesWrittenExceptingTruncation();
  LOG.info("Wrote " + readCount + " reads, " + unmappedReadCount + " unmapped, into " + shardName
      + (hadOutOfOrder ? "ignored out of order" : "") + ", wrote " + bytesWritten + " bytes");
  Metrics.counter(WriteBAMFn.class, "Written reads").inc(readCount);
  Metrics.counter(WriteBAMFn.class, "Written unmapped reads").inc(unmappedReadCount);
  // Cast both counts to long before summing so the addition cannot overflow int range.
  final long totalReadCount = (long) readCount + (long) unmappedReadCount;
  Metrics.distribution(WriteBAMFn.class, "Maximum Reads Per Shard").update(totalReadCount);
  c.output(shardName, window.maxTimestamp(), window);
  c.output(SEQUENCE_SHARD_SIZES_TAG, KV.of(sequenceIndex, bytesWritten),
      window.maxTimestamp(), window);
}
/** A distribution update with no current metrics container must be a silent no-op. */
@Test
public void testDistributionWithoutContainer() {
  assertNull(MetricsEnvironment.getCurrentContainer());
  // Should not fail even though there is no metrics container.
  Metrics.distribution(NS, NAME).update(5L);
}
/** Distribution updates must aggregate into the Flink gauge as min/max/count/sum/mean. */
@Test
public void testDistribution() {
  FlinkMetricContainer.FlinkDistributionGauge flinkGauge =
      new FlinkMetricContainer.FlinkDistributionGauge(DistributionResult.IDENTITY_ELEMENT);
  when(metricGroup.gauge(eq("namespace.name"), anyObject())).thenReturn(flinkGauge);
  FlinkMetricContainer container = new FlinkMetricContainer(runtimeContext);
  MetricsContainer step = container.getMetricsContainer("step");
  MetricName metricName = MetricName.named("namespace", "name");
  Distribution distribution = step.getDistribution(metricName);
  // Before any update is flushed, the gauge still holds the identity element.
  assertThat(flinkGauge.getValue(), is(DistributionResult.IDENTITY_ELEMENT));
  // first set will install the mocked distribution
  container.updateMetrics("step");
  distribution.update(42);
  distribution.update(-23);
  distribution.update(0);
  distribution.update(1);
  container.updateMetrics("step");
  // Samples {42, -23, 0, 1} -> max 42, min -23, count 4, sum 20, mean 5.0.
  assertThat(flinkGauge.getValue().getMax(), is(42L));
  assertThat(flinkGauge.getValue().getMin(), is(-23L));
  assertThat(flinkGauge.getValue().getCount(), is(4L));
  assertThat(flinkGauge.getValue().getSum(), is(20L));
  assertThat(flinkGauge.getValue().getMean(), is(5.0));
}
}
noCoordinateReads + " no coordinate reads, " + skippedReads + ", skipped reads");
stopWatch.stop();
// Record how long this indexing shard took, then bump the per-shard counters.
Metrics.distribution(WriteBAIFn.class, "Indexing Shard Processing Time (sec)").update(
    stopWatch.elapsed(TimeUnit.SECONDS));
Metrics.counter(WriteBAIFn.class, "Finished Indexing Shard Count").inc();
Metrics.counter(WriteBAIFn.class, "Indexed reads").inc(processedReads);
Metrics.counter(WriteBAIFn.class, "Indexed no coordinate reads").inc(noCoordinateReads);
// NOTE(review): only processedReads feed the per-shard distribution; no-coordinate and
// skipped reads are excluded — confirm that is the intended shard-size definition.
Metrics.distribution(WriteBAIFn.class, "Reads Per Indexing Shard").update(processedReads);
noCoordinateReads + " no coordinate reads, " + skippedReads + ", skipped reads");
stopWatch.stop();
// Shard finished: record elapsed indexing time and per-shard read counters.
Metrics.distribution(WriteBAIFn.class, "Indexing Shard Processing Time (sec)").update(
    stopWatch.elapsed(TimeUnit.SECONDS));
Metrics.counter(WriteBAIFn.class, "Finished Indexing Shard Count").inc();
Metrics.counter(WriteBAIFn.class, "Indexed reads").inc(processedReads);
Metrics.counter(WriteBAIFn.class, "Indexed no coordinate reads").inc(noCoordinateReads);
// Distribution of indexed reads per shard (processed reads only).
Metrics.distribution(WriteBAIFn.class, "Reads Per Indexing Shard").update(processedReads);
// Unpack the integer-valued distribution payload from the Fn API proto and replay it
// into the local distribution (sum, count, … — the call continues beyond this chunk).
BeamFnApi.IntDistributionData intDistributionData = distributionData.getIntDistributionData();
distribution.update(
    intDistributionData.getSum(),
    intDistributionData.getCount(), // remaining arguments follow outside this view
/** Records each element into the "input" distribution, counts it, and emits it twice. */
@SuppressWarnings("unused")
@ProcessElement
public void processElement(ProcessContext c) {
  Distribution inputDist = Metrics.distribution(MetricsTest.class, "input");
  count.inc();
  inputDist.update(c.element());
  // Emit the element twice so downstream assertions observe duplicated output.
  c.output(c.element());
  c.output(c.element());
}
/**
 * Per-element metric exercise: samples the "input" distribution, increments the counter,
 * sets the gauge to a fixed value, and emits the element to the main and secondary outputs.
 */
@SuppressWarnings("unused")
@ProcessElement
public void processElement(ProcessContext c) {
  Distribution inputDist = Metrics.distribution(MetricsTest.class, "input");
  Gauge myGauge = Metrics.gauge(MetricsTest.class, "my-gauge");
  Integer element = c.element();
  count.inc();
  inputDist.update(element);
  myGauge.set(12L);
  c.output(element);
  c.output(output2, element);
}
})