/**
 * Writes every recorded histogram value to {@code output} as UTF-8 text,
 * one value per line, repeating each value once per occurrence recorded
 * in its iteration step.
 *
 * <p>The writer is flushed but deliberately not closed: the caller owns
 * the stream. (The original code only closed the writer on the error
 * path, so on success the buffered output was never flushed and could be
 * silently lost.)
 */
@Override
public void dump(OutputStream output) {
    PrintWriter p = new PrintWriter(new OutputStreamWriter(output, UTF_8));
    try {
        for (HistogramIterationValue value : histogram.recordedValues()) {
            // Emit the value once for each count added in this iteration step.
            for (int j = 0; j < value.getCountAddedInThisIterationStep(); j++) {
                p.printf("%d%n", value.getValueIteratedTo());
            }
        }
    } catch (Exception e) {
        // Best-effort dump: log and fall through to the flush.
        logger.error("Exception:", e);
    } finally {
        p.flush();
    }
}
}
/**
 * Value at the start of the current iteration step, converted from the
 * underlying integer histogram's units into double-valued units.
 */
public double getValueIteratedFrom() {
    final double conversionRatio =
            integerHistogramIterationValue.getIntegerToDoubleValueConversionRatio();
    return conversionRatio * integerHistogramIterationValue.getValueIteratedFrom();
}
// Re-record this iteration value (and its full count) into the target
// histogram; the expected-interval argument presumably enables HdrHistogram's
// coordinated-omission compensation — confirm against the library docs.
toHistogram.recordValueWithCountAndExpectedInterval(v.getValueIteratedTo(), v.getCountAtValueIteratedTo(), expectedIntervalBetweenValueSamples);
/**
 * Value at the end of the current iteration step, converted from the
 * underlying integer histogram's units into double-valued units.
 */
public double getValueIteratedTo() {
    final double conversionRatio =
            integerHistogramIterationValue.getIntegerToDoubleValueConversionRatio();
    return conversionRatio * integerHistogramIterationValue.getValueIteratedTo();
}
/**
 * Per-bucket counts, keyed by the bucket's midpoint latency in milliseconds
 * (rounded to 2 decimal places). Buckets are logarithmic, starting at 1 ms
 * with a base-2 growth factor.
 */
@Managed(description = "Per-bucket counts")
public Map<Double, Long> getCounts() {
    final long millisInNanos = TimeUnit.MILLISECONDS.toNanos(1);
    final Map<Double, Long> counts = new TreeMap<>();
    for (HistogramIterationValue bucket
            : snapshot.get().logarithmicBucketValues(millisInNanos, 2)) {
        // Midpoint of the bucket's value range, in nanoseconds.
        double midpointNanos =
                (bucket.getValueIteratedFrom() + bucket.getValueIteratedTo()) / 2.0;
        counts.put(round(midpointNanos / (double) millisInNanos, 2),
                bucket.getCountAddedInThisIterationStep());
    }
    return counts;
}
/**
 * Per-bucket total pause time in seconds (rounded to 2 decimal places),
 * keyed by the bucket's midpoint latency in milliseconds. Buckets are
 * logarithmic, starting at 1 ms with a base-2 growth factor.
 */
@Managed(description = "Per-bucket total pause time in s")
public Map<Double, Double> getSums() {
    final long millisInNanos = TimeUnit.MILLISECONDS.toNanos(1);
    final Map<Double, Double> sums = new TreeMap<>();
    long previousCumulative = 0;
    for (HistogramIterationValue bucket
            : snapshot.get().logarithmicBucketValues(millisInNanos, 2)) {
        // Midpoint of the bucket's value range, in nanoseconds.
        double midpointNanos =
                (bucket.getValueIteratedFrom() + bucket.getValueIteratedTo()) / 2.0;
        // getTotalValueToThisValue() is cumulative; subtracting the previous
        // cumulative total yields just this bucket's contribution.
        long cumulative = bucket.getTotalValueToThisValue();
        sums.put(round(midpointNanos / millisInNanos, 2),
                round((cumulative - previousCumulative) * 1.0 / TimeUnit.SECONDS.toNanos(1), 2));
        previousCumulative = cumulative;
    }
    return sums;
}
// Print one percentile-distribution line: value scaled to output units,
// percentile as a fraction of 1, cumulative count, and 1/(1-percentile).
// The 100th percentile takes a separate format string without the last
// column, since 1/(1 - 1.0) would divide by zero.
// NOTE(review): fragment — the closing brace of the else-branch lies outside
// the visible source.
if (iterationValue.getPercentileLevelIteratedTo() != 100.0D) { printStream.format(Locale.US, percentileFormatString, iterationValue.getValueIteratedTo() / outputValueUnitScalingRatio, iterationValue.getPercentileLevelIteratedTo()/100.0D, iterationValue.getTotalCountToThisValue(), 1/(1.0D - (iterationValue.getPercentileLevelIteratedTo()/100.0D)) ); } else { printStream.format(Locale.US, lastLinePercentileFormatString, iterationValue.getValueIteratedTo() / outputValueUnitScalingRatio, iterationValue.getPercentileLevelIteratedTo()/100.0D, iterationValue.getTotalCountToThisValue());
// Build a key/value pair for one percentile step: the key appends the
// percentile level (as a fraction of 1, formatted via percentileFormatString)
// to the prefix; the value is the raw long value iterated to. Locale.US keeps
// the decimal separator stable regardless of the default locale.
HistogramIterationValue val = pi.next(); String key = prefix + String.format(Locale.US, percentileFormatString, val.getPercentileLevelIteratedTo() / 100d); String value = String.format(Locale.US, "%d", val.getValueIteratedTo());
// NOTE(review): fragment — these statements close two separate forEach lambdas
// that populate percentile -> latency maps. Publish latencies are divided by
// 1000.0 (presumably micros -> millis) while end-to-end latencies go through
// microsToMillis; confirm both inputs really share the same source unit.
result.aggregatedPublishLatencyQuantiles.put(value.getPercentile(), value.getValueIteratedTo() / 1000.0); }); result.aggregatedEndToEndLatencyQuantiles.put(value.getPercentile(), microsToMillis(value.getValueIteratedTo())); });
/**
 * Snapshot of all recorded values: one array entry per recorded-value
 * iteration step, with the result sized exactly to the number of entries.
 */
@Override
public long[] getValues() {
    long[] buffer = new long[1024];
    int size = 0;
    for (HistogramIterationValue value : histogram.recordedValues()) {
        buffer[size++] = value.getValueIteratedTo();
        // Grow once the buffer is full so the next write always fits.
        if (size == buffer.length) {
            buffer = Arrays.copyOf(buffer, buffer.length * 2);
        }
    }
    // Trim any unused (or over-allocated) tail.
    return Arrays.copyOf(buffer, size);
}
/** Count added during the current iteration step, delegated unchanged. */
public long getCountAddedInThisIterationStep() {
    return integerHistogramIterationValue.getCountAddedInThisIterationStep();
}
/** Count recorded at the value iterated to, delegated unchanged. */
public long getCountAtValueIteratedTo() {
    return integerHistogramIterationValue.getCountAtValueIteratedTo();
}
/** Percentile of the current iteration step, delegated unchanged. */
public double getPercentile() {
    return integerHistogramIterationValue.getPercentile();
}
/**
 * Per-bucket counts, keyed by the bucket's midpoint latency in milliseconds
 * (rounded to 2 decimal places). Buckets are logarithmic, starting at 1 ms
 * with a base-2 growth factor.
 */
@Managed(description = "Per-bucket counts")
public Map<Double, Long> getCounts() {
    final long millisInNanos = TimeUnit.MILLISECONDS.toNanos(1);
    final Map<Double, Long> counts = new TreeMap<>();
    for (HistogramIterationValue bucket
            : snapshot.get().logarithmicBucketValues(millisInNanos, 2)) {
        // Midpoint of the bucket's value range, in nanoseconds.
        double midpointNanos =
                (bucket.getValueIteratedFrom() + bucket.getValueIteratedTo()) / 2.0;
        counts.put(round(midpointNanos / (double) millisInNanos, 2),
                bucket.getCountAddedInThisIterationStep());
    }
    return counts;
}
/**
 * Per-bucket total pause time in seconds (rounded to 2 decimal places),
 * keyed by the bucket's midpoint latency in milliseconds. Buckets are
 * logarithmic, starting at 1 ms with a base-2 growth factor.
 */
@Managed(description = "Per-bucket total pause time in s")
public Map<Double, Double> getSums() {
    final long millisInNanos = TimeUnit.MILLISECONDS.toNanos(1);
    final Map<Double, Double> sums = new TreeMap<>();
    long previousCumulative = 0;
    for (HistogramIterationValue bucket
            : snapshot.get().logarithmicBucketValues(millisInNanos, 2)) {
        // Midpoint of the bucket's value range, in nanoseconds.
        double midpointNanos =
                (bucket.getValueIteratedFrom() + bucket.getValueIteratedTo()) / 2.0;
        // getTotalValueToThisValue() is cumulative; subtracting the previous
        // cumulative total yields just this bucket's contribution.
        long cumulative = bucket.getTotalValueToThisValue();
        sums.put(round(midpointNanos / millisInNanos, 2),
                round((cumulative - previousCumulative) * 1.0 / TimeUnit.SECONDS.toNanos(1), 2));
        previousCumulative = cumulative;
    }
    return sums;
}
// Print one percentile-distribution line: value scaled to output units,
// percentile as a fraction of 1, cumulative count, and 1/(1-percentile).
// The 100th percentile takes a separate format string without the last
// column, since 1/(1 - 1.0) would divide by zero.
// NOTE(review): fragment — the closing brace of the else-branch lies outside
// the visible source.
if (iterationValue.getPercentileLevelIteratedTo() != 100.0D) { printStream.format(Locale.US, percentileFormatString, iterationValue.getValueIteratedTo() / outputValueUnitScalingRatio, iterationValue.getPercentileLevelIteratedTo()/100.0D, iterationValue.getTotalCountToThisValue(), 1/(1.0D - (iterationValue.getPercentileLevelIteratedTo()/100.0D)) ); } else { printStream.format(Locale.US, lastLinePercentileFormatString, iterationValue.getValueIteratedTo() / outputValueUnitScalingRatio, iterationValue.getPercentileLevelIteratedTo()/100.0D, iterationValue.getTotalCountToThisValue());
/**
 * Value at the end of the current iteration step, converted from the
 * underlying integer histogram's units into double-valued units.
 */
public double getValueIteratedTo() {
    final double conversionRatio =
            integerHistogramIterationValue.getIntegerToDoubleValueConversionRatio();
    return conversionRatio * integerHistogramIterationValue.getValueIteratedTo();
}
/**
 * Snapshot of all recorded values: one array entry per recorded-value
 * iteration step, with the result sized exactly to the number of entries.
 */
@Override
public long[] getValues() {
    long[] buffer = new long[1024];
    int size = 0;
    for (HistogramIterationValue value : histogram.recordedValues()) {
        buffer[size++] = value.getValueIteratedTo();
        // Grow once the buffer is full so the next write always fits.
        if (size == buffer.length) {
            buffer = Arrays.copyOf(buffer, buffer.length * 2);
        }
    }
    // Trim any unused (or over-allocated) tail.
    return Arrays.copyOf(buffer, size);
}
/** Count added during the current iteration step, delegated unchanged. */
public long getCountAddedInThisIterationStep() {
    return integerHistogramIterationValue.getCountAddedInThisIterationStep();
}
/** Count recorded at the value iterated to, delegated unchanged. */
public long getCountAtValueIteratedTo() {
    return integerHistogramIterationValue.getCountAtValueIteratedTo();
}