/**
 * Creates a {@link MergingDigest}, which is generally the best known implementation right now.
 *
 * @param compression The compression parameter. 100 is a common value for normal uses. 1000 is
 *                    extremely large. The number of centroids retained will be a smallish
 *                    (usually less than 10) multiple of this number.
 * @return the newly constructed MergingDigest
 */
@SuppressWarnings("WeakerAccess")
public static TDigest createMergingDigest(double compression) {
    MergingDigest digest = new MergingDigest(compression);
    return digest;
}
/**
 * Builds a {@link MergingDigest} whose merge buffer is sized at ten times the
 * compression parameter.
 *
 * @param compression the compression parameter forwarded to the digest
 * @return a freshly constructed MergingDigest
 */
@Override
TDigest create(double compression) {
    int bufferSize = (int) (10 * compression);
    return new MergingDigest(compression, bufferSize);
}
// NOTE(review): this fragment appears to be the interior of a deserialization
// routine (presumably MergingDigest.fromBytes — confirm against the enclosing
// method) with TWO encoding branches pasted into a single scope: both `n` and
// `r` are declared twice, so this cannot compile as written. The two halves
// should live in separate branches of the enclosing switch/if on the encoding
// flag. `buf`, `min`, and `max` come from the enclosing scope — not visible here.

// First (verbose-looking) encoding: full-width double/int fields.
double compression = buf.getDouble();
int n = buf.getInt();
MergingDigest r = new MergingDigest(compression);
r.setMinMax(min, max);
r.lastUsedCell = n;

// Second (compact-looking) encoding: counts stored as shorts, explicit buffer size.
int n = buf.getShort();            // duplicate declaration of `n` — belongs in its own branch
int bufferSize = buf.getShort();
MergingDigest r = new MergingDigest(compression, bufferSize, n);  // duplicate declaration of `r`
r.setMinMax(min, max);
r.lastUsedCell = buf.getShort();
@Setup public void setup() { data = new double[10000000]; for (int i = 0; i < data.length; i++) { data[i] = gen.nextDouble(); } if (method.equals("tree")) { td = new AVLTreeDigest(compression); } else { td = new MergingDigest(500); } // First values are very cheap to add, we are more interested in the steady state, // when the summary is full. Summaries are expected to contain about 5*compression // centroids, hence the 5 factor for (int i = 0; i < 5 * compression; ++i) { td.add(gen.nextDouble()); } }
/**
 * Benchmark fixture: fills {@code data} with random doubles, builds a
 * {@link MergingDigest} sized from the {@code compression} and {@code factor}
 * parameters, selects the scale function, and warms the digest up so
 * measurements see a full, steady-state summary.
 */
@Setup
public void setup() {
    data = new double[10000000];
    for (int i = 0; i < data.length; i++) {
        data[i] = gen.nextDouble();
    }
    // Second argument sizes the merge buffer, third sizes the centroid array.
    // NOTE(review): this assumes `compression` and `factor` are int @Params —
    // confirm, since MergingDigest(double, int, int) needs int arguments here.
    td = new MergingDigest(compression, (factor + 1) * compression, compression);
    td.setScaleFunction(ScaleFunction.valueOf(scaleFunction));
    // First values are very cheap to add; we are more interested in the steady
    // state, when the summary is full. Feed several buffer-fulls of input
    // (5 * compression * (factor + 1) values, i.e. 5x the merge-buffer size)
    // so the digest has merged repeatedly before measurement begins.
    // (The original comment cited "0.6*compression centroids", which did not
    // match the loop bound — kept the bound, clarified the rationale.)
    for (int i = 0; i < 5 * compression * (factor + 1); ++i) {
        td.add(gen.nextDouble());
    }
}