double alpha = qi.norm(2); if (Math.abs(alpha) > Double.MIN_VALUE) { qi.assign(Functions.div(alpha)); } else { if (Double.isInfinite(alpha) || Double.isNaN(alpha)) {
L.viewColumn(k).viewPart(k, n - k).assign(Functions.div(akk)); L.viewColumn(k).viewPart(0, k).assign(0);
/**
 * Computes an in-place, column-wise Cholesky decomposition of {@code a} into the
 * lower-triangular factor {@code L}.  Rank-deficient input is tolerated: a
 * degenerate column is zeroed out and {@code isPositiveDefinite} is cleared.
 *
 * @param a symmetric matrix to decompose; its values are copied into {@code L}
 */
private void decompose(Matrix a) {
  int n = a.rowSize();
  L.assign(a);

  // column-wise submatrix cholesky with simple pivoting
  for (int k = 0; k < n; k++) {
    double akk = L.get(k, k);

    // set upper part of column to 0.
    L.viewColumn(k).viewPart(0, k).assign(0);

    // Relative tolerance, scaled by the largest magnitude remaining in this column.
    double epsilon = 1.0e-10 * L.viewColumn(k).aggregate(Functions.MAX, Functions.ABS);

    if (akk <= epsilon) {
      // Degenerate column: zero the entire remaining column, diagonal included.
      // NOTE(review): an earlier comment claimed the diagonal is set to 1, but the
      // code zeroes it; the code is taken as authoritative here.
      L.viewColumn(k).viewPart(k, n - k).assign(0);
      isPositiveDefinite = false;

      // no need to subtract from remaining sub-matrix
    } else {
      // normalize column by diagonal element
      akk = Math.sqrt(Math.max(0, akk));
      L.set(k, k, akk);
      L.viewColumn(k).viewPart(k + 1, n - k - 1).assign(Functions.div(akk));

      // now subtract scaled version of column from the trailing sub-matrix
      for (int j = k + 1; j < n; j++) {
        Vector columnJ = L.viewColumn(j).viewPart(j, n - j);
        Vector columnK = L.viewColumn(k).viewPart(j, n - j);
        columnJ.assign(columnK, Functions.minusMult(L.get(j, k)));
      }
    }
  }
}
nrm = -nrm; QRcolumnsPart[k].assign(Functions.div(nrm));
@Test public void testUpdate() { MultiNormal f = new MultiNormal(20); Vector a = f.sample(); Vector b = f.sample(); Vector c = f.sample(); DenseVector x = new DenseVector(a); Centroid x1 = new Centroid(1, x); x1.update(new Centroid(2, new DenseVector(b))); Centroid x2 = new Centroid(x1); x1.update(c); // check for correct value Vector mean = a.plus(b).plus(c).assign(Functions.div(3)); assertEquals(0, x1.getVector().minus(mean).norm(1), 1.0e-8); assertEquals(3, x1.getWeight(), 0); assertEquals(0, x2.minus(a.plus(b).divide(2)).norm(1), 1.0e-8); assertEquals(2, x2.getWeight(), 0); assertEquals(0, new Centroid(x1.getIndex(), x1, x1.getWeight()).minus(x1).norm(1), 1.0e-8); // and verify shared storage assertEquals(0, x.minus(x1).norm(1), 0); assertEquals(3, x1.getWeight(), 1.0e-8); assertEquals(1, x1.getIndex()); }
@Override
protected void map(IntWritable key, VectorWritable value, Context context)
    throws IOException, InterruptedException {
  // Scale the wrapped vector so its L1 norm becomes 1, then re-emit it under the same key.
  // NOTE(review): a zero-norm vector would divide by zero (NaN components) — confirm
  // upstream guarantees non-empty vectors.
  Vector vector = value.get();
  double l1Norm = vector.norm(1.0);
  vector.assign(Functions.div(l1Norm));
  context.write(key, value);
}
}
@Override
protected void map(IntWritable key, VectorWritable value, Context context)
    throws IOException, InterruptedException {
  // Normalize the incoming vector in place to unit L1 norm and pass it through.
  // NOTE(review): a zero-norm vector would divide by zero (NaN components) — confirm
  // upstream guarantees non-empty vectors.
  Vector vector = value.get();
  double l1Norm = vector.norm(1.0);
  vector.assign(Functions.div(l1Norm));
  context.write(key, value);
}
}
@Override
protected void map(IntWritable key, VectorWritable value, Context context)
    throws IOException, InterruptedException {
  // Rescale the vector by its L1 norm (making components sum to 1 in magnitude),
  // then forward the record unchanged otherwise.
  // NOTE(review): a zero-norm vector would divide by zero (NaN components) — confirm
  // upstream guarantees non-empty vectors.
  Vector vector = value.get();
  double l1Norm = vector.norm(1.0);
  vector.assign(Functions.div(l1Norm));
  context.write(key, value);
}
}
double alpha = qi.norm(2); if (Math.abs(alpha) > Double.MIN_VALUE) { qi.assign(Functions.div(alpha)); } else { if (Double.isInfinite(alpha) || Double.isNaN(alpha)) {
L.viewColumn(k).viewPart(k, n - k).assign(Functions.div(akk)); L.viewColumn(k).viewPart(0, k).assign(0);
L.viewColumn(k).viewPart(k, n - k).assign(Functions.div(akk)); L.viewColumn(k).viewPart(0, k).assign(0);
/**
 * Computes an in-place, column-wise Cholesky decomposition of {@code a} into the
 * lower-triangular factor {@code L}.  Rank-deficient input is tolerated: a
 * degenerate column is zeroed out and {@code isPositiveDefinite} is cleared.
 *
 * @param a symmetric matrix to decompose; its values are copied into {@code L}
 */
private void decompose(Matrix a) {
  int n = a.rowSize();
  L.assign(a);

  // column-wise submatrix cholesky with simple pivoting
  for (int k = 0; k < n; k++) {
    double akk = L.get(k, k);

    // set upper part of column to 0.
    L.viewColumn(k).viewPart(0, k).assign(0);

    // Relative tolerance, scaled by the largest magnitude remaining in this column.
    double epsilon = 1.0e-10 * L.viewColumn(k).aggregate(Functions.MAX, Functions.ABS);

    if (akk <= epsilon) {
      // Degenerate column: zero the entire remaining column, diagonal included.
      // NOTE(review): an earlier comment claimed the diagonal is set to 1, but the
      // code zeroes it; the code is taken as authoritative here.
      L.viewColumn(k).viewPart(k, n - k).assign(0);
      isPositiveDefinite = false;

      // no need to subtract from remaining sub-matrix
    } else {
      // normalize column by diagonal element
      akk = Math.sqrt(Math.max(0, akk));
      L.set(k, k, akk);
      L.viewColumn(k).viewPart(k + 1, n - k - 1).assign(Functions.div(akk));

      // now subtract scaled version of column from the trailing sub-matrix
      for (int j = k + 1; j < n; j++) {
        Vector columnJ = L.viewColumn(j).viewPart(j, n - j);
        Vector columnK = L.viewColumn(k).viewPart(j, n - j);
        columnJ.assign(columnK, Functions.minusMult(L.get(j, k)));
      }
    }
  }
}
/**
 * Computes an in-place, column-wise Cholesky decomposition of {@code a} into the
 * lower-triangular factor {@code L}.  Rank-deficient input is tolerated: a
 * degenerate column is zeroed out and {@code isPositiveDefinite} is cleared.
 *
 * @param a symmetric matrix to decompose; its values are copied into {@code L}
 */
private void decompose(Matrix a) {
  int n = a.rowSize();
  L.assign(a);

  // column-wise submatrix cholesky with simple pivoting
  for (int k = 0; k < n; k++) {
    double akk = L.get(k, k);

    // set upper part of column to 0.
    L.viewColumn(k).viewPart(0, k).assign(0);

    // Relative tolerance, scaled by the largest magnitude remaining in this column.
    double epsilon = 1.0e-10 * L.viewColumn(k).aggregate(Functions.MAX, Functions.ABS);

    if (akk <= epsilon) {
      // Degenerate column: zero the entire remaining column, diagonal included.
      // NOTE(review): an earlier comment claimed the diagonal is set to 1, but the
      // code zeroes it; the code is taken as authoritative here.
      L.viewColumn(k).viewPart(k, n - k).assign(0);
      isPositiveDefinite = false;

      // no need to subtract from remaining sub-matrix
    } else {
      // normalize column by diagonal element
      akk = Math.sqrt(Math.max(0, akk));
      L.set(k, k, akk);
      L.viewColumn(k).viewPart(k + 1, n - k - 1).assign(Functions.div(akk));

      // now subtract scaled version of column from the trailing sub-matrix
      for (int j = k + 1; j < n; j++) {
        Vector columnJ = L.viewColumn(j).viewPart(j, n - j);
        Vector columnK = L.viewColumn(k).viewPart(j, n - j);
        columnJ.assign(columnK, Functions.minusMult(L.get(j, k)));
      }
    }
  }
}
nrm = -nrm; QRcolumnsPart[k].assign(Functions.div(nrm));
nrm = -nrm; QRcolumnsPart[k].assign(Functions.div(nrm));
@Test public void testInitialization() { // Start with super clusterable data. List<? extends WeightedVector> data = cubishTestData(0.01); // Just do initialization of ball k-means. This should drop a point into each of the clusters. BallKMeans r = new BallKMeans(new BruteSearch(new SquaredEuclideanDistanceMeasure()), 6, 20); r.cluster(data); // Put the centroids into a matrix. Matrix x = new DenseMatrix(6, 5); int row = 0; for (Centroid c : r) { x.viewRow(row).assign(c.viewPart(0, 5)); row++; } // Verify that each column looks right. Should contain zeros except for a single 6. final Vector columnNorms = x.aggregateColumns(new VectorFunction() { @Override public double apply(Vector f) { // Return the sum of three discrepancy measures. return Math.abs(f.minValue()) + Math.abs(f.maxValue() - 6) + Math.abs(f.norm(1) - 6); } }); // Verify all errors are nearly zero. assertEquals(0, columnNorms.norm(1) / columnNorms.size(), 0.1); // Verify that the centroids are a permutation of the original ones. SingularValueDecomposition svd = new SingularValueDecomposition(x); Vector s = svd.getS().viewDiagonal().assign(Functions.div(6)); assertEquals(5, s.getLengthSquared(), 0.05); assertEquals(5, s.norm(1), 0.05); }