/**
 * Thread-safe percentile lookup: serializes access to the histogram state
 * before delegating to the parent implementation.
 *
 * @param percentile the requested percentile (for example {@code 99.9})
 * @return the recorded value at the given percentile
 */
@Override
public synchronized long getValueAtPercentile(final double percentile) {
    return super.getValueAtPercentile(percentile);
}
/**
 * Adapts a metrics-style quantile in [0.0, 1.0] to HdrHistogram's 0-100
 * percentile scale and returns the recorded value at that point.
 *
 * @param quantile the quantile in [0.0, 1.0]
 * @return the histogram value at the corresponding percentile
 */
@Override
public double getValue(double quantile) {
    final double percentile = 100.0 * quantile;
    return histogram.getValueAtPercentile(percentile);
}
// Snapshot a fixed ladder of percentiles from the underlying histogram into
// int fields: every 5% from 0 to 95, then the 99.x tail (99, 99.5, 99.9,
// 99.95, 99.99) and the max (p100).
// NOTE(review): each (int) cast truncates — values above Integer.MAX_VALUE
// would wrap; confirm recorded values always fit in an int.
p0 = (int) underlying.getValueAtPercentile(0); p5 = (int) underlying.getValueAtPercentile(5); p10 = (int) underlying.getValueAtPercentile(10); p15 = (int) underlying.getValueAtPercentile(15); p20 = (int) underlying.getValueAtPercentile(20); p25 = (int) underlying.getValueAtPercentile(25); p30 = (int) underlying.getValueAtPercentile(30); p35 = (int) underlying.getValueAtPercentile(35); p40 = (int) underlying.getValueAtPercentile(40); p45 = (int) underlying.getValueAtPercentile(45); p50 = (int) underlying.getValueAtPercentile(50); p55 = (int) underlying.getValueAtPercentile(55); p60 = (int) underlying.getValueAtPercentile(60); p65 = (int) underlying.getValueAtPercentile(65); p70 = (int) underlying.getValueAtPercentile(70); p75 = (int) underlying.getValueAtPercentile(75); p80 = (int) underlying.getValueAtPercentile(80); p85 = (int) underlying.getValueAtPercentile(85); p90 = (int) underlying.getValueAtPercentile(90); p95 = (int) underlying.getValueAtPercentile(95); p99 = (int) underlying.getValueAtPercentile(99); p99_5 = (int) underlying.getValueAtPercentile(99.5); p99_9 = (int) underlying.getValueAtPercentile(99.9); p99_95 = (int) underlying.getValueAtPercentile(99.95); p99_99 = (int) underlying.getValueAtPercentile(99.99); p100 = (int) underlying.getValueAtPercentile(100);
/**
 * Returns the recorded latency at the given percentile, converted from the
 * histogram's native nanosecond resolution to the requested unit.
 *
 * <p>Fixed the misspelled parameter name ({@code percential}); Java parameter
 * names are not part of the binary interface, so callers are unaffected.
 *
 * @param percentile the percentile to query (for example {@code 99.0} for p99)
 * @param unit the time unit to express the latency in
 * @return the latency at {@code percentile}, converted to {@code unit}
 */
public double getLatencyAtPercentile(double percentile, TimeUnit unit) {
    // Histogram values are recorded in nanoseconds; convert on the way out.
    return convert(histo.getValueAtPercentile(percentile), TimeUnit.NANOSECONDS, unit);
}
/**
 * Builds a sorted map of target percentile to recorded value, converting each
 * value from nanoseconds to the configured target unit.
 *
 * @param histogram the histogram to read percentile values from
 * @return percentile-to-latency map ordered by ascending percentile
 */
private Map<Double, Long> getPercentiles(Histogram histogram) {
    final Map<Double, Long> percentiles = new TreeMap<>();
    for (final double percentile : options.targetPercentiles()) {
        final long nanos = histogram.getValueAtPercentile(percentile);
        percentiles.put(percentile, options.targetUnit().convert(nanos, TimeUnit.NANOSECONDS));
    }
    return percentiles;
}
/** * This is called periodically from the StatusThread. There's a single * StatusThread per Client process. We optionally serialize the interval to * log on this opportunity. * * @see com.yahoo.ycsb.measurements.OneMeasurement#getSummary() */ @Override public String getSummary() { Histogram intervalHistogram = getIntervalHistogramAndAccumulate(); // we use the summary interval as the histogram file interval. if (histogramLogWriter != null) { histogramLogWriter.outputIntervalHistogram(intervalHistogram); } DecimalFormat d = new DecimalFormat("#.##"); return "[" + getName() + ": Count=" + intervalHistogram.getTotalCount() + ", Max=" + intervalHistogram.getMaxValue() + ", Min=" + intervalHistogram.getMinValue() + ", Avg=" + d.format(intervalHistogram.getMean()) + ", 90=" + d.format(intervalHistogram.getValueAtPercentile(90)) + ", 99=" + d.format(intervalHistogram.getValueAtPercentile(99)) + ", 99.9=" + d.format(intervalHistogram.getValueAtPercentile(99.9)) + ", 99.99=" + d.format(intervalHistogram.getValueAtPercentile(99.99)) + "]"; }
// NOTE(review): fragment of larger reporting call(s) whose opening is outside
// this view. Reads p50/p90/max from the interval histogram, p50..p99.99/max
// from the accumulated histogram, and a configurable percentile plus max from
// the moving-window histogram — each divided by outputValueUnitRatio to
// convert raw histogram units to the configured output unit.
((Histogram) intervalHistogram).getValueAtPercentile(50.0) / config.outputValueUnitRatio, ((Histogram) intervalHistogram).getValueAtPercentile(90.0) / config.outputValueUnitRatio, ((Histogram) intervalHistogram).getMaxValue() / config.outputValueUnitRatio, accumulatedRegularHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio, accumulatedRegularHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio, accumulatedRegularHistogram.getValueAtPercentile(99.0) / config.outputValueUnitRatio, accumulatedRegularHistogram.getValueAtPercentile(99.9) / config.outputValueUnitRatio, accumulatedRegularHistogram.getValueAtPercentile(99.99) / config.outputValueUnitRatio, accumulatedRegularHistogram.getMaxValue() / config.outputValueUnitRatio ); ((Histogram) movingWindowSumHistogram).getValueAtPercentile(config.movingWindowPercentileToReport) / config.outputValueUnitRatio, ((Histogram) movingWindowSumHistogram).getMaxValue() / config.outputValueUnitRatio );
// Value at the requested percentile from the total (accumulated) histogram;
// fragment of a larger call whose opening is outside this view.
totalHistogram.getValueAtPercentile(percentile));
public long getValueAtPercentile(double percentile) { if (histogram == null) { if (size == 0) { // this is consistent with HdrHistogram behavior return 0; } if (!sorted) { sortValues(); } if (percentile == 0) { // support "0th" percentile to mean the smallest tracked percentile return values[0]; } return values[(int) Math.ceil(size * percentile / 100) - 1]; } return histogram.getValueAtPercentile(percentile); }
/**
 * Returns the request latency at a given percentile.
 *
 * @param host the host (if this is relevant in the way percentiles are categorized).
 * @param statement the statement (if this is relevant in the way percentiles are categorized).
 * @param exception the exception (if this is relevant in the way percentiles are categorized).
 * @param percentile the percentile (for example, {@code 99.0} for the 99th percentile).
 * @return the latency (in milliseconds) at the given percentile, or a negative value if it's not
 *     available yet.
 * @see #computeKey(Host, Statement, Exception)
 */
public long getLatencyAtPercentile(
        Host host, Statement statement, Exception exception, double percentile) {
    checkArgument(
            percentile >= 0.0 && percentile < 100,
            "percentile must be between 0.0 and 100 (was %s)",
            percentile);
    Histogram histogram = getLastIntervalHistogram(host, statement, exception);
    // Need both a histogram for this key and enough recorded samples for the
    // percentile to be meaningful; otherwise report "not available yet".
    if (histogram == null || histogram.getTotalCount() < minRecordedValues) {
        return -1;
    }
    return histogram.getValueAtPercentile(percentile);
}
/**
 * Materializes the configured percentiles into a list, preserving their
 * order. A {@code null} entry is emitted for a {@code null} percentile or
 * when nothing at all has been recorded.
 *
 * @return one entry per requested percentile; entries may be {@code null}
 */
@UserAggregationResult
public List<Number> result() {
    final boolean useLongs = values != null;
    final long totalCount = useLongs ? values.getTotalCount() : doubles.getTotalCount();
    final boolean empty = totalCount == 0;
    final List<Number> result = new ArrayList<>(percentiles.size());
    for (final Double percentile : percentiles) {
        if (percentile == null || empty) {
            result.add(null);
            continue;
        }
        // Percentiles are configured as quantiles (0-1); HdrHistogram wants 0-100.
        // Keep the two branches separate so the boxed type (Long vs Double)
        // matches the source histogram, as in the original.
        final double p = percentile * 100D;
        if (useLongs) {
            result.add(values.getValueAtPercentile(p));
        } else {
            result.add(doubles.getValueAtPercentile(p));
        }
    }
    return result;
}
}
/**
 * Summarizes the aggregated histogram as an insertion-ordered map: min,
 * minNonZero, max, total, mean and stdev first, followed by one entry per
 * configured percentile (skipped when {@code null} or nothing was recorded).
 *
 * @return summary statistics keyed by name / percentile string
 */
@UserAggregationResult
public Map<String, Number> result() {
    final boolean useLongs = values != null;
    final long totalCount = useLongs ? values.getTotalCount() : doubles.getTotalCount();
    final boolean empty = totalCount == 0;
    final Map<String, Number> result = new LinkedHashMap<>(percentiles.size() + 6);
    result.put("min", useLongs ? (Number) values.getMinValue() : (Number) doubles.getMinValue());
    result.put("minNonZero", useLongs ? (Number) values.getMinNonZeroValue() : (Number) doubles.getMinNonZeroValue());
    result.put("max", useLongs ? (Number) values.getMaxValue() : (Number) doubles.getMaxValue());
    result.put("total", totalCount);
    result.put("mean", useLongs ? values.getMean() : doubles.getMean());
    result.put("stdev", useLongs ? values.getStdDeviation() : doubles.getStdDeviation());
    for (final Double percentile : percentiles) {
        if (percentile == null || empty) {
            continue;
        }
        // Separate branches keep the boxed type (Long vs Double) matching the
        // source histogram, as in the original.
        final double p = percentile * 100D;
        if (useLongs) {
            result.put(percentile.toString(), values.getValueAtPercentile(p));
        } else {
            result.put(percentile.toString(), doubles.getValueAtPercentile(p));
        }
    }
    return result;
}
}
/**
 * Maps a 0-1 quantile onto HdrHistogram's 0-100 percentile scale and reads
 * the value recorded at that point.
 *
 * @param quantile the quantile in [0.0, 1.0]
 * @return the histogram value at the corresponding percentile
 */
@Override
public double getValue(double quantile) {
    return histogram.getValueAtPercentile(quantile * 100.0);
}
/**
 * Gauge-style accessor: reports the histogram value at the preconfigured
 * percentile.
 *
 * @return the recorded value at {@code percentile}, boxed as a {@link Long}
 */
@Override
public Long getValue() {
    final long valueAtPercentile = histogram.getValueAtPercentile(percentile);
    return valueAtPercentile;
}
}
/**
 * Converts the requested quantile (0-1) to a percentile (0-100) and returns
 * the histogram value recorded there.
 *
 * @param quantile the quantile in [0.0, 1.0]
 * @return the histogram value at the corresponding percentile
 */
@Override
public double getValue(double quantile) {
    final double asPercentile = 100.0 * quantile;
    return histogram.getValueAtPercentile(asPercentile);
}
/**
 * Reads a percentile from the histogram and rescales it to output units.
 *
 * @param histogram source histogram
 * @param scalingFactor divisor converting raw histogram units to output units
 * @param percentile percentile to read (0-100)
 * @return the scaled value at the given percentile
 */
private static double scaledPercentile(
        final Histogram histogram, final double scalingFactor, final double percentile) {
    final double raw = histogram.getValueAtPercentile(percentile);
    return raw / scalingFactor;
}
}
/**
 * Logs a one-line summary of the accumulated latency distribution: mean,
 * selected tail percentiles, and the maximum.
 */
private static void printAggregatedStats() {
    final Histogram aggregate = cumulativeRecorder.getIntervalHistogram();
    // Name each figure before logging so the long parameter list stays readable.
    final long median = (long) aggregate.getValueAtPercentile(50);
    final long p95 = (long) aggregate.getValueAtPercentile(95);
    final long p99 = (long) aggregate.getValueAtPercentile(99);
    final long p999 = (long) aggregate.getValueAtPercentile(99.9);
    final long p9999 = (long) aggregate.getValueAtPercentile(99.99);
    final long p99999 = (long) aggregate.getValueAtPercentile(99.999);
    log.info(
            "Aggregated latency stats --- Latency: mean: {} ms - med: {} - 95pct: {} - 99pct: {} - 99.9pct: {} - 99.99pct: {} - 99.999pct: {} - Max: {}",
            dec.format(aggregate.getMean()),
            median,
            p95,
            p99,
            p999,
            p9999,
            p99999,
            (long) aggregate.getMaxValue());
}
/**
 * Refreshes the cached topic-load statistics from the interval recorder:
 * captures the wall time elapsed since the previous update, then snapshots
 * the mean, median, tail percentiles and sample count of the new interval
 * histogram.
 */
public void updateStats() {
    topicLoadHistogram = topicLoadTimeRecorder.getIntervalHistogram(topicLoadHistogram);
    // Read the clock once so the elapsed interval and the new start time are
    // consistent — the original called System.nanoTime() twice, silently
    // dropping the time between the two reads from the next interval.
    long nowMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
    this.elapsedIntervalMs = nowMs - topicLoadRecordStartTime;
    topicLoadRecordStartTime = nowMs;
    this.meanTopicLoadMs = topicLoadHistogram.getMean();
    this.medianTopicLoadMs = topicLoadHistogram.getValueAtPercentile(50);
    this.topicLoad95Ms = topicLoadHistogram.getValueAtPercentile(95);
    this.topicLoad99Ms = topicLoadHistogram.getValueAtPercentile(99);
    this.topicLoad999Ms = topicLoadHistogram.getValueAtPercentile(99.9);
    this.topicsLoad9999Ms = topicLoadHistogram.getValueAtPercentile(99.99);
    this.topicLoadCounts = topicLoadHistogram.getTotalCount();
}
/**
 * Collects the recorded value for each configured target percentile,
 * converted from nanoseconds into the configured target unit, keyed and
 * sorted by percentile.
 *
 * @param histogram the histogram to query
 * @return sorted map of target percentile to converted latency
 */
private Map<Double, Long> getPercentiles(Histogram histogram) {
    final TreeMap<Double, Long> byPercentile = new TreeMap<>();
    for (final double target : options.targetPercentiles()) {
        byPercentile.put(
                target,
                options.targetUnit().convert(
                        histogram.getValueAtPercentile(target), TimeUnit.NANOSECONDS));
    }
    return byPercentile;
}
/**
 * Prints the maximum, mean and 99.9th-percentile value of the histogram to
 * standard output on a single line.
 *
 * @param histogram the histogram to summarize
 */
public static void printStats(final Histogram histogram) {
    final long max = histogram.getMaxValue();
    final double mean = histogram.getMean();
    final long p999 = histogram.getValueAtPercentile(99.9);
    System.out.printf("Max = %d, Mean = %f, 99.9%% = %d%n", max, mean, p999);
}
}