/**
 * Builds a new {@link AVLTreeDigest}, currently close to the best available t-digest
 * implementation.
 *
 * @param compression controls accuracy versus size; 100 suits typical workloads while
 *                    1000 is extremely large. The digest keeps a small multiple
 *                    (usually under 10x) of this many centroids.
 * @return a freshly constructed AVL-tree-backed digest
 */
@SuppressWarnings("WeakerAccess")
public static TDigest createAvlTreeDigest(double compression) {
    TDigest digest = new AVLTreeDigest(compression);
    return digest;
}
// Factory override: supplies the AVL-tree-backed digest variant for the given compression.
@Override TDigest create(double compression) { return new AVLTreeDigest(compression); }
// Benchmark fixture: fills the input array with uniform randoms and pre-warms the
// digest so measurements reflect steady state rather than cheap early inserts.
@Setup public void setup() {
    data = new double[10000000];
    for (int i = 0; i < data.length; i++) {
        data[i] = gen.nextDouble();
    }
    if (method.equals("tree")) {
        td = new AVLTreeDigest(compression);
    } else {
        // NOTE(review): this branch hardcodes 500 instead of using the `compression`
        // field like the "tree" branch does — confirm whether that is intentional.
        td = new MergingDigest(500);
    }
    // First values are very cheap to add, we are more interested in the steady state,
    // when the summary is full. Summaries are expected to contain about 5*compression
    // centroids, hence the 5 factor
    for (int i = 0; i < 5 * compression; ++i) {
        td.add(gen.nextDouble());
    }
}
double max = buf.getDouble(); double compression = buf.getDouble(); AVLTreeDigest r = new AVLTreeDigest(compression); r.setMinMax(min, max); int n = buf.getInt(); double max = buf.getDouble(); double compression = buf.getDouble(); AVLTreeDigest r = new AVLTreeDigest(compression); r.setMinMax(min, max); int n = buf.getInt();
/**
 * Creates a statistics provider backed by an AVL-tree t-digest using the
 * class-wide {@code COMPRESSION} setting.
 */
public OnlineStatisticsProvider() { digest = new AVLTreeDigest(COMPRESSION); }
private static TDigest getDefaultTDigest() { //return TDigest.createDigest(COMPRESSION); return new AVLTreeDigest(COMPRESSION); //return new TreeDigest(COMPRESSION); //return new ArrayDigest(4, COMPRESSION); //return new MergingDigest(COMPRESSION); }
/**
 * Creates a one-minute histogram bin anchored at the given minute boundary.
 *
 * @param minMillis start of the minute this bin represents, in epoch milliseconds
 */
MinuteBin(long minMillis) {
    this.minMillis = minMillis;
    dist = new AVLTreeDigest(ACCURACY);
}
/**
 * Builds a histogram bin covering a single minute of data.
 *
 * @param minMillis epoch-millisecond timestamp of the minute boundary for this bin
 */
MinuteBin(long minMillis) {
    this.minMillis = minMillis;
    dist = new AVLTreeDigest(ACCURACY);
}
/**
 * Creates an adapter that delegates to Ted Dunning's {@link AVLTreeDigest}.
 *
 * @param compression compression parameter forwarded to the underlying digest
 */
public TDunningTDigest(double compression) { m_delegate = new AVLTreeDigest(compression); }
/**
 * Produces a point-in-time aggregate digest by merging every per-thread bin's
 * distribution into a single fresh digest.
 */
private TDigest snapshot() {
    final TDigest snapshot = new AVLTreeDigest(ACCURACY);
    // Walk each thread's bin list and fold every bin's digest into the aggregate.
    perThreadHistogramBins.values()
            .forEach(bins -> bins.forEach(bin -> snapshot.add(bin.dist)));
    return snapshot;
}
/**
 * Merges all per-thread histogram bins into one combined digest representing
 * the current overall distribution.
 */
private TDigest snapshot() {
    final TDigest snapshot = new AVLTreeDigest(ACCURACY);
    // Nested iteration replaces the flatMap pipeline; traversal order is unchanged.
    perThreadHistogramBins.values()
            .forEach(bins -> bins.forEach(bin -> snapshot.add(bin.dist)));
    return snapshot;
}
/**
 * Recursively merges a list of digests into one, combining them in groups
 * (roughly four groups per level) rather than all at once.
 *
 * @param compression compression for the merged result digest
 * @param subData digests to merge; NOTE(review): assumed non-empty — an empty
 *                list would throw on {@code subData.get(0)}, confirm callers
 *                guarantee this
 * @return a single digest containing all input data
 */
protected static AVLTreeDigest delegateMerge(double compression, List<AVLTreeDigest> subData) {
    // Group size: split the inputs into about four groups (at least 1 element each).
    int n = Math.max(1, subData.size() / 4);
    AVLTreeDigest r = new AVLTreeDigest(compression);
    // If the inputs were recording raw data, the merged digest must too.
    if (subData.get(0).isRecording()) {
        r.recordAllData();
    }
    for (int i = 0; i < subData.size(); i += n) {
        if (n > 1) {
            // Merge each group recursively, then fold the group result in.
            r.add(delegateMerge(compression, subData.subList(i, Math.min(i + n, subData.size()))));
        } else {
            // Groups of size 1: add each digest directly.
            r.add(subData.get(i));
        }
    }
    return r;
}
}
/**
 * Initializes the sketch: starts a single-threaded producer feeding a bounded
 * queue, creates the speed distribution digest, and lays out the strip charts.
 */
@Override public void setup() {
    // Bounded hand-off queue between the producer thread and this consumer.
    BlockingQueue<State> stateQueue = new ArrayBlockingQueue<>(2000);
    input = stateQueue;

    // One background thread generating State samples into the queue.
    ExecutorService producerPool = Executors.newFixedThreadPool(1);
    producerPool.submit(new Producer(stateQueue));

    speedDistribution = new AVLTreeDigest(300);
    noise = new Random();

    // Chart layout: x, y, width, height, line weight, then value range.
    speed = new Stripchart(10, 430, 460, 80, 1, 0, 0, 90);
    rpm = new Stripchart(10, 520, 460, 80, 1, 0, 0, 2200);
    throttle = new Stripchart(10, 610, 460, 80, 1, 0, 0, 100);

    frameRate(15);
}
if (encoding == VERBOSE_ENCODING) { double compression = buf.getDouble(); AVLTreeDigest r = new AVLTreeDigest(compression); int n = buf.getInt(); double[] means = new double[n]; } else if (encoding == SMALL_ENCODING) { double compression = buf.getDouble(); AVLTreeDigest r = new AVLTreeDigest(compression); int n = buf.getInt(); double[] means = new double[n];