// NOTE(review): extraction fragment, not a complete method — braces are unbalanced and
// intervening statements are missing; this will not compile as-is. What is visible looks
// like part of a fast asin approximation: odd symmetry via -fastAsin(-x) for negative
// inputs, NaN for x > 1, then a blended evaluation where x0..x3 are per-interval
// normalized positions (bound(...) presumably clamps to [0,1] — TODO confirm) and the
// result r accumulates mix-weighted polynomial evaluations eval(m_i, vars).
// TODO: restore the full method body from the original source before editing logic.
return -fastAsin(-x); } else if (x > 1) { return Double.NaN; double x0 = bound((c0High - x) / c0High); double x1 = bound((c1High - x) / (c1High - c2Low)); double x2 = bound((c2High - x) / (c2High - c3Low)); double x3 = bound((c3High - x) / (c3High - c4Low)); r += mix0 * eval(m0, vars); r += mix1 * eval(m1, vars); r += mix2 * eval(m2, vars); r += mix3 * eval(m3, vars);
// NOTE(review): extraction fragment, not a complete method — `header`, `badCount`, `w`,
// `n`, and `total` are defined outside this view, and the trailing `)))` is unbalanced,
// which suggests the String.format(...) call was originally the argument of an enclosing
// call (e.g. an assertion-failure or exception constructor) that was cut off. As written
// here the String.format(...) result would be discarded, so the "Egregiously oversized
// centroid" message would never reach anyone — TODO: restore the enclosing call from the
// original source. The visible logic scans centroids in quantile order and reports any
// whose k-scale span (k2 - k1) exceeds 1 while holding more than one sample.
double k1 = scale.k(0, publicCompression, totalWeight); double q = 0; double left = 0; for (int i = 0; i < n; i++) { double dq = w[i] / total; double k2 = scale.k(q + dq, publicCompression, totalWeight); q += dq / 2; if (k2 - k1 > 1 && w[i] != 1) { System.out.printf("%sOversize centroid at " + "%d, k0=%.2f, k1=%.2f, dk=%.2f, w=%.2f, q=%.4f, dq=%.4f, left=%.1f, current=%.2f maxw=%.2f\n", header, i, k1, k2, k2 - k1, w[i], q, dq, left, w[i], scale.max(q, publicCompression, totalWeight)); header = ""; badCount++; String.format("Egregiously oversized centroid at " + "%d, k0=%.2f, k1=%.2f, dk=%.2f, w=%.2f, q=%.4f, dq=%.4f, left=%.1f, current=%.2f, maxw=%.2f\n", i, k1, k2, k2 - k1, w[i], q, dq, left, w[i], scale.max(q, publicCompression, totalWeight)));
/**
 * Sets the scale function used by this digest.
 *
 * <p>Scale functions whose name ends in {@code "NO_NORM"} are rejected, because this
 * digest implementation cannot work with the non-normalized variants.
 *
 * @param scaleFunction the scale function to use
 * @throws IllegalArgumentException if the scale function's name ends with {@code "NO_NORM"}
 */
public void setScaleFunction(ScaleFunction scaleFunction) {
    // Detection is by name suffix, so every *_NO_NORM variant is excluded.
    boolean unsupported = scaleFunction.toString().endsWith("NO_NORM");
    if (unsupported) {
        String msg = String.format("Can't use %s as scale with %s", scaleFunction, this.getClass());
        throw new IllegalArgumentException(msg);
    }
    this.scale = scaleFunction;
}
// NOTE(review): extraction fragment, not a complete method — there is a dangling
// `} else {` with no visible matching `if`, and `wSoFar`, `proposedWeight`,
// `projectedW`, `addThis`, and `incomingOrder` come from outside this view; this will
// not compile as-is. The visible logic appears to decide, while merging incoming
// centroids in sorted order, whether the next centroid can be absorbed into the
// current one: one branch bounds the proposed weight by the k-scale size limit at
// both ends of the candidate interval (q0, q2), the other bounds the projected
// weight by wLimit derived from advancing k1 by one unit of the scale function.
// TODO: restore the full method body from the original source before editing logic.
double k1 = scale.k(0, compression, totalWeight); double wLimit = totalWeight * scale.q(k1 + 1, compression, totalWeight); for (int i = 1; i < incomingCount; i++) { int ix = incomingOrder[i]; double q0 = wSoFar / totalWeight; double q2 = (wSoFar + proposedWeight) / totalWeight; addThis = proposedWeight <= Math.min(scale.max(q0, compression, totalWeight), scale.max(q2, compression, totalWeight)); } else { addThis = projectedW <= wLimit; k1 = scale.k(wSoFar / totalWeight, compression, totalWeight); wLimit = totalWeight * scale.q(k1 + 1, compression, totalWeight);
// NOTE(review): extraction fragment, not a complete method — braces are unbalanced
// (the `break;` and the following `if` are missing their closing scopes) and `after`,
// `n1`, `w1`, and `k1` are defined outside this view; this will not compile as-is.
// The visible logic walks adjacent nodes of a tree-backed summary (IntAVLTree) and
// merges a node with its successor only while their combined weight stays within the
// stricter of the two local size limits, Math.min(k0, k1), as given by the scale
// function at each node's quantile position.
// TODO: restore the full method body from the original source before editing logic.
double k0 = scale.max(n0 / count, compression, count); int node = summary.first(); int w0 = summary.count(node); while (after != IntAVLTree.NIL) { w1 = summary.count(after); k1 = scale.max((n1 + w1) / count, compression, count); if (w0 + w1 > Math.min(k0, k1)) { break; if (node != IntAVLTree.NIL) { n0 = n1; k0 = scale.max(n0 / count, compression, count); w0 = w1; n1 = n0 + w0;
@Setup public void setup() { data = new double[10000000]; for (int i = 0; i < data.length; i++) { data[i] = gen.nextDouble(); } td = new MergingDigest(compression, (factor + 1) * compression, compression); td.setScaleFunction(ScaleFunction.valueOf(scaleFunction)); // First values are very cheap to add, we are more interested in the steady state, // when the summary is full. Summaries are expected to contain about 0.6*compression // centroids, hence the 5 * compression * (factor+1) for (int i = 0; i < 5 * compression * (factor + 1); ++i) { td.add(gen.nextDouble()); } }