/** * Calculates percentile from {@link TDigest}. * <p>Handles cases where only one value in TDigest object. */ public static double calculatePercentile(@Nonnull TDigest tDigest, int percentile) { if (tDigest.size() == 1) { // Specialize cases where only one value in TDigest (cannot use quantile method) return tDigest.centroids().iterator().next().mean(); } else { return tDigest.quantile(percentile / 100.0); } }
/**
 * Merges another digest into this one. The incoming centroids are copied
 * and shuffled first so that insertion order does not bias the result.
 */
@Override
public void add(TDigest other) {
  List<Centroid> shuffled = new ArrayList<>(other.centroids());
  Collections.shuffle(shuffled, gen);
  for (Centroid c : shuffled) {
    add(c.mean(), c.count(), c);
  }
}
/**
 * Merges a batch of digests into this one, widening our min/max range to
 * cover each incoming digest before absorbing its centroids.
 */
@Override
public void add(List<? extends TDigest> others) {
  for (TDigest digest : others) {
    setMinMax(Math.min(min, digest.getMin()), Math.max(max, digest.getMax()));
    for (Centroid c : digest.centroids()) {
      // Per-sample data is carried over only when full recording is enabled.
      add(c.mean(), c.count(), recordAllData ? c.data() : null);
    }
  }
}
/**
 * Returns the weighted mean of all recorded values, or {@code Double.NaN}
 * when the snapshot is empty.
 *
 * <p>Fix: the previous implementation divided the weighted sum by the
 * number of centroids rather than by the total sample count, which is
 * incorrect whenever centroids carry different weights.
 */
@Override
public double mean() {
  Collection<Centroid> centroids = snapshot().centroids();
  long totalCount = 0;
  double weightedSum = 0.0;
  for (Centroid c : centroids) {
    totalCount += c.count();
    weightedSum += (double) c.count() * c.mean();
  }
  // Empty snapshot (or all-zero weights) has no defined mean.
  return totalCount == 0 ? Double.NaN : weightedSum / totalCount;
}
public double max() { //This is a lie if the winning centroid's weight > 1 return perThreadHistogramBins.values().stream().flatMap(List::stream).map(b -> b.dist.centroids()). mapToDouble(cs -> getLast(cs, new Centroid(MIN_VALUE)).mean()).max().orElse(NaN); }
/**
 * Returns the weighted mean of all recorded values, or {@code Double.NaN}
 * when the snapshot is empty.
 *
 * <p>Fix: the previous implementation divided the weighted sum by the
 * number of centroids rather than by the total sample count, which is
 * incorrect whenever centroids carry different weights.
 */
@Override
public double mean() {
  Collection<Centroid> centroids = snapshot().centroids();
  long totalCount = 0;
  double weightedSum = 0.0;
  for (Centroid c : centroids) {
    totalCount += c.count();
    weightedSum += (double) c.count() * c.mean();
  }
  // Empty snapshot (or all-zero weights) has no defined mean.
  return totalCount == 0 ? Double.NaN : weightedSum / totalCount;
}
public double min() { // This is a lie if the winning centroid's weight > 1 return perThreadHistogramBins.values().stream().flatMap(List::stream).map(b -> b.dist.centroids()). mapToDouble(cs -> getFirst(cs, new Centroid(MAX_VALUE)).mean()).min().orElse(NaN); }
public double max() { //This is a lie if the winning centroid's weight > 1 return perThreadHistogramBins.values().stream().flatMap(List::stream).map(b -> b.dist.centroids()). mapToDouble(cs -> getLast(cs, new Centroid(MIN_VALUE)).mean()).max().orElse(NaN); }
public double min() { // This is a lie if the winning centroid's weight > 1 return perThreadHistogramBins.values().stream().flatMap(List::stream).map(b -> b.dist.centroids()). mapToDouble(cs -> getFirst(cs, new Centroid(MAX_VALUE)).mean()).min().orElse(NaN); }
/**
 * Merges another digest into this one.
 *
 * <p>The incoming centroids are collected and shuffled before being re-added
 * so that merge order does not bias the resulting digest.
 *
 * <p>Cleanup: use the diamond operator instead of the explicit
 * {@code new ArrayList<Centroid>()} type argument, matching the sibling
 * implementation of this overload elsewhere in the codebase.
 */
@Override
public void add(TDigest other) {
  List<Centroid> tmp = new ArrayList<>();
  for (Centroid centroid : other.centroids()) {
    tmp.add(centroid);
  }
  Collections.shuffle(tmp, gen);
  for (Centroid centroid : tmp) {
    add(centroid.mean(), centroid.count(), centroid);
  }
}
/**
 * Serializes a {@link WavefrontHistogram} to JSON as an object with a "bins"
 * array; each bin records its total count, start time, fixed one-minute
 * duration, and parallel "means"/"counts" arrays of its centroids.
 *
 * @param hist    histogram whose minute bins are serialized (drained when
 *                the {@code clear} field is true — see hist.bins(clear))
 * @param context carries the shared JsonGenerator to write into
 * @throws Exception propagated from the underlying JSON writer
 */
private void internalProcessWavefrontHistogram(WavefrontHistogram hist, Context context) throws Exception {
  final JsonGenerator json = context.json;
  json.writeStartObject();
  json.writeArrayFieldStart("bins");
  // One JSON object per minute-granularity bin.
  for (WavefrontHistogram.MinuteBin bin : hist.bins(clear)) {
    final Collection<Centroid> centroids = bin.getDist().centroids();
    json.writeStartObject();
    // Count: total number of samples in this bin's digest.
    json.writeNumberField("count", bin.getDist().size());
    // Start
    json.writeNumberField("startMillis", bin.getMinMillis());
    // Duration: bins are fixed one-minute windows.
    json.writeNumberField("durationMillis", 60 * 1000);
    // Means: one entry per centroid, index-aligned with "counts" below.
    json.writeArrayFieldStart("means");
    for (Centroid c : centroids) {
      json.writeNumber(c.mean());
    }
    json.writeEndArray();
    // Counts: parallel array to "means".
    json.writeArrayFieldStart("counts");
    for (Centroid c : centroids) {
      json.writeNumber(c.count());
    }
    json.writeEndArray();
    json.writeEndObject();
  }
  json.writeEndArray();
  json.writeEndObject();
}
private void internalProcessWavefrontHistogram(WavefrontHistogram hist, Context context) throws Exception { final JsonGenerator json = context.json; json.writeStartObject(); json.writeArrayFieldStart("bins"); for (WavefrontHistogram.MinuteBin bin : hist.bins(clear)) { final Collection<Centroid> centroids = bin.getDist().centroids(); json.writeStartObject(); // Count json.writeNumberField("count", bin.getDist().size()); // Start json.writeNumberField("startMillis", bin.getMinMillis()); // Duration json.writeNumberField("durationMillis", 60 * 1000); // Means json.writeArrayFieldStart("means"); for (Centroid c : centroids) { json.writeNumber(c.mean()); } json.writeEndArray(); // Counts json.writeArrayFieldStart("counts"); for (Centroid c : centroids) { json.writeNumber(c.count()); } json.writeEndArray(); json.writeEndObject(); } json.writeEndArray(); json.writeEndObject(); }
/**
 * Reports a histogram to Wavefront. WavefrontHistogram instances are sent in
 * the native "!M"-per-minute-bin wire format over the histograms socket;
 * plain histograms fall back to the standard count/sampling/summarizable
 * metric lines.
 *
 * @param name      metric name used to build the wire-format identifier/tags
 * @param histogram the histogram to report
 * @param context   unused
 * @throws Exception propagated from socket writes
 */
@Override
public void processHistogram(MetricName name, Histogram histogram, Void context) throws Exception {
  if (histogram instanceof WavefrontHistogram) {
    WavefrontHistogram wavefrontHistogram = (WavefrontHistogram) histogram;
    // When the 'clear' field is true, bins(clear) drains the histogram as
    // well as reading it.
    List<WavefrontHistogram.MinuteBin> bins = wavefrontHistogram.bins(clear);
    if (bins.isEmpty()) return; // don't send empty histograms.
    for (WavefrontHistogram.MinuteBin minuteBin : bins) {
      StringBuilder sb = new StringBuilder();
      // "!M <epoch-seconds>" marks a minute-granularity histogram line.
      sb.append("!M ").append(minuteBin.getMinMillis() / 1000);
      // Each centroid is encoded as " #<count> <mean>".
      for (Centroid c : minuteBin.getDist().centroids()) {
        sb.append(" #").append(c.count()).append(" ").append(c.mean());
      }
      sb.append(" \"").append(getName(name)).append("\"").append(tagsForMetricName(name)).append("\n");
      histogramsSocket.write(sb.toString());
    }
  } else {
    if (!sendEmptyHistograms && histogram.count() == 0) {
      // send count still but skip the others.
      writeMetric(name, "count", 0);
    } else {
      writeMetric(name, "count", histogram.count());
      writeSampling(name, histogram);
      writeSummarizable(name, histogram);
      if (clear) histogram.clear();
    }
  }
}
protected static TDigest merge(Iterable<TDigest> subData, Random gen, TDigest r) { List<Centroid> centroids = new ArrayList<Centroid>(); boolean recordAll = false; for (TDigest digest : subData) { for (Centroid centroid : digest.centroids()) { centroids.add(centroid); } recordAll |= digest.isRecording(); } Collections.shuffle(centroids, gen); if (recordAll) { r.recordAllData(); } for (Centroid c : centroids) { if (r.isRecording()) { // TODO should do something better here. } ((AbstractTDigest) r).add(c.mean(), c.count(), c); } return r; }