@Override
public Matrix gradient(Matrix W) {
    // Gradient of the squared-error objective 0.5 * ||X W - Y||^2
    // with respect to W: X^T (X W - Y).
    final Matrix residual = X.times(W).minus(Y);
    return X.transpose().times(residual);
}
@Override
public Matrix predict(Matrix x) {
    // Bilinear response: u^T x^T w, plus bias when bias mode is enabled.
    final Matrix response = this.u.transpose().times(x.transpose()).times(this.w);
    if (this.biasMode) {
        response.plusEquals(this.bias);
    }
    // Only the diagonal of the response matrix is the prediction;
    // return it packed into a single-row sparse matrix.
    final Vector ydiag = CFMatrixUtils.diag(response);
    final Matrix result =
        SparseMatrixFactoryMTJ.INSTANCE.createIdentity(1, ydiag.getDimensionality());
    result.setRow(0, ydiag);
    return result;
}
}
public Matrix predict(double[] x) {
    // Two-layer forward pass: g(W2^T * [1; g(W1^T * [1; x])]).
    final Matrix input = prepareMatrix(x);
    final Matrix hiddenActivation = gMat.apply(weightsL1.transpose().times(input));
    // Re-prepend the bias unit before feeding the second layer.
    final Matrix hidden = prepareMatrix(hiddenActivation.getColumn(0));
    return gMat.apply(weightsL2.transpose().times(hidden));
}
public Matrix predict(double[] x) {
    // Forward propagation through both layers, applying the activation
    // function gMat after each linear transform.
    final Matrix layerInput = prepareMatrix(x);
    final Matrix hiddenPreActivation = weightsL1.transpose().times(layerInput);
    // prepareMatrix re-adds the bias element for the next layer's input.
    final Matrix hiddenOutput = prepareMatrix(gMat.apply(hiddenPreActivation).getColumn(0));
    final Matrix outputPreActivation = weightsL2.transpose().times(hiddenOutput);
    return gMat.apply(outputPreActivation);
}
private Matrix prepareMatrix(double[] y) {
    // Build the column vector [1; y] — the leading 1 is the bias term.
    final Matrix row = DMF.createMatrix(1, y.length + 1);
    row.setElement(0, 0, 1);
    row.setSubMatrix(0, 1, DMF.copyArray(new double[][] { y }));
    return row.transpose();
}
/** * Computes the prediction covariance from the Jacobian and believe * covariance * @param A * System Jacobian, which is estimated in the case of the EKF. * @param beliefCovariance * Covariance of the current state belief. * @return * Covariance of the prediction. */ public Matrix computePredictionCovariance( Matrix A, Matrix beliefCovariance ) { // Calculate the covariance, which will increase due to the // inherent uncertainty of the model. Matrix P = beliefCovariance; P = A.times( P ).times( A.transpose() ); P.plusEquals( this.modelCovariance ); return P; }
private Matrix prepareMatrix(double[] y) {
    // Produce [1; y] as a column matrix; element (0,0) holds the bias 1.
    final Matrix augmented = DMF.createMatrix(1, y.length + 1);
    augmented.setElement(0, 0, 1);
    augmented.setSubMatrix(0, 1, DMF.copyArray(new double[][] { y }));
    return augmented.transpose();
}
/**
 * Computes the Moore-Penrose pseudo-inverse via the SVD: singular values
 * whose magnitude is at most {@code effectiveZero} are treated as zero,
 * the rest are inverted, and the result is V * Sigma^+ * U^T.
 *
 * @param effectiveZero threshold below which a singular value is
 *      considered zero; must be non-negative
 * @return the pseudo-inverse of this matrix
 */
@Override
final public Matrix pseudoInverse(
    final double effectiveZero)
{
    ArgumentChecker.assertIsNonNegative("effectiveZero", effectiveZero);
    SVD svd = svdDecompose();
    final int numSingularValues = Math.min(getNumRows(), getNumColumns());
    for (int i = 0; i < numSingularValues; ++i)
    {
        final double sigma = svd.Sigma.get(i, i);
        // Invert only the numerically significant singular values.
        final double inverted = (Math.abs(sigma) <= effectiveZero) ? 0 : 1.0 / sigma;
        svd.Sigma.setElement(i, i, inverted);
    }
    return svd.V.times(svd.Sigma.transpose()).times(svd.U.transpose());
}
private Matrix prepareMatrix(Vector y) {
    // Build the column vector [1; y] — the leading 1 is the bias term.
    final Matrix row = DMF.createMatrix(1, y.getDimensionality() + 1);
    row.setElement(0, 0, 1);
    row.setSubMatrix(0, 1, DMF.copyRowVectors(y));
    return row.transpose();
}
private Matrix prepareMatrix(Vector y) {
    // Produce [1; y] as a column matrix; element (0,0) holds the bias 1.
    final Matrix augmented = DMF.createMatrix(1, y.getDimensionality() + 1);
    augmented.setElement(0, 0, 1);
    augmented.setSubMatrix(0, 1, DMF.copyRowVectors(y));
    return augmented.transpose();
}
@Override
public Matrix gradient(Matrix W) {
    // Residual of the (optionally biased) linear model: X W + bias - Y.
    final Matrix residual = X.times(W).minus(Y);
    if (this.bias != null) {
        residual.plusEquals(this.bias);
    }
    // Rows with a NaN target carry no supervision signal: zero their
    // residual so they contribute nothing to the gradient.
    final int numRows = Y.getNumRows();
    for (int row = 0; row < numRows; row++) {
        if (Double.isNaN(Y.getElement(row, 0))) {
            residual.setElement(row, 0, 0);
        }
    }
    return X.transpose().times(residual);
}
/**
 * Solves {@code this * x = b} for {@code x} via QR decomposition:
 * {@code this = Q R} with Q orthonormal, so {@code R x = Q^T b}.
 *
 * @param b right-hand-side vector; dimensionality checked by
 *      {@code checkSolveDimensions}
 * @return the solution vector x
 * @throws IllegalStateException if this matrix is not square
 */
@Override
final public Vector solve(
    final Vector b)
{
    checkSolveDimensions(b);
    if (!isSquare())
    {
        // Fixed: the message previously left the "(this is ..." parenthesis unclosed.
        throw new IllegalStateException("Solve only works on square "
            + "matrices (this is " + getNumRows() + " x " + getNumColumns()
            + ")");
    }
    QR qr = qrDecompose();
    // Q is orthonormal, so Q^T b transforms the RHS; R is upper triangular.
    return upperTriangularSolve(qr.R, qr.Q.transpose().times(b));
}
/**
 * Scales the MultivariateGaussian by premultiplying by the given Matrix.
 *
 * @param premultiplyMatrix Matrix against which to premultiply this
 * @return Scaled MultivariateGaussian
 */
public MultivariateGaussian scale(
    Matrix premultiplyMatrix)
{
    // If y = A x and x ~ N(m, C), then y ~ N(A m, A C A^T).
    final Vector scaledMean = premultiplyMatrix.times(this.mean);
    final Matrix scaledCovariance = premultiplyMatrix
        .times(this.getCovariance())
        .times(premultiplyMatrix.transpose());
    return new MultivariateGaussian(scaledMean, scaledCovariance);
}
/**
 * Solves {@code this * x = b} for {@code x} using a QR decomposition.
 *
 * @param b right-hand-side vector; dimensionality checked by
 *      {@code checkSolveDimensions}
 * @return the solution vector x
 * @throws IllegalStateException if this matrix is not square
 */
@Override
final public Vector solve(
    final Vector b)
{
    checkSolveDimensions(b);
    if (!isSquare())
    {
        // Fixed: the message previously left the "(this is ..." parenthesis unclosed.
        throw new IllegalStateException("Solve only works on square "
            + "matrices (this is " + getNumRows() + " x " + getNumColumns()
            + ")");
    }
    QR qr = qrDecompose();
    // this = Q R, Q orthonormal => R x = Q^T b, solved by back-substitution.
    return upperTriangularSolve(qr.R, qr.Q.transpose().times(b));
}
/**
 * Scales the MultivariateGaussian by premultiplying by the given Matrix.
 *
 * @param premultiplyMatrix Matrix against which to premultiply this
 * @return Scaled MultivariateGaussian
 */
public MultivariateGaussian scale(
    Matrix premultiplyMatrix)
{
    // Linear transform of a Gaussian: mean -> A m, covariance -> A C A^T.
    final Vector transformedMean = premultiplyMatrix.times(this.mean);
    final Matrix transformedCovariance =
        premultiplyMatrix.times(this.getCovariance()).times(premultiplyMatrix.transpose());
    return new MultivariateGaussian(transformedMean, transformedCovariance);
}
/**
 * Solves {@code this * x = b} for {@code x} by QR decomposition and
 * back-substitution on the upper-triangular factor.
 *
 * @param b right-hand-side vector; dimensionality checked by
 *      {@code checkSolveDimensions}
 * @return the solution vector x
 * @throws IllegalStateException if this matrix is not square
 */
@Override
final public Vector solve(
    final Vector b)
{
    checkSolveDimensions(b);
    if (!isSquare())
    {
        // Fixed: the message previously left the "(this is ..." parenthesis unclosed.
        throw new IllegalStateException("Solve only works on square "
            + "matrices (this is " + getNumRows() + " x " + getNumColumns()
            + ")");
    }
    QR qr = qrDecompose();
    return upperTriangularSolve(qr.R, qr.Q.transpose().times(b));
}
/**
 * Scales the MultivariateGaussian by premultiplying by the given Matrix.
 *
 * @param premultiplyMatrix Matrix against which to premultiply this
 * @return Scaled MultivariateGaussian
 */
public MultivariateGaussian scale(
    Matrix premultiplyMatrix)
{
    // Applying y = A x to x ~ N(m, C) yields y ~ N(A m, A C A^T).
    final Vector newMean = premultiplyMatrix.times(this.mean);
    final Matrix covarianceTimesAt =
        this.getCovariance().times(premultiplyMatrix.transpose());
    final Matrix newCovariance = premultiplyMatrix.times(covarianceTimesAt);
    return new MultivariateGaussian(newMean, newCovariance);
}
public double sumLoss(List<Pair<Matrix>> pairs, Matrix u, Matrix w, Matrix bias, BilinearLearnerParameters params) { LossFunction loss = params.getTyped(BilinearLearnerParameters.LOSS); loss = new MatLossFunction(loss); double total = 0; int i = 0; for (final Pair<Matrix> pair : pairs) { final Matrix X = pair.firstObject(); final Matrix Y = pair.secondObject(); final SparseMatrix Yexp = BilinearSparseOnlineLearner.expandY(Y); final Matrix expectedAll = u.transpose().times(X.transpose()).times(w); loss.setY(Yexp); loss.setX(expectedAll); if (bias != null) loss.setBias(bias); logger.debug("Testing pair: " + i); total += loss.eval(null); // Assums an identity w. i++; } return total; } }
/**
 * Acceptance test for a backtracking line search on a proximal-gradient
 * step: accepts when the objective at the proximal point is bounded by
 * the objective at W plus a linear term and a step-size-dependent term.
 *
 * @param W current iterate
 * @param grad gradient of the objective at W
 * @param prox candidate point produced by the proximal step
 * @param eta current step size
 * @return true when the candidate step is accepted
 */
@Override
public boolean test_backtrack(Matrix W, Matrix grad, Matrix prox, double eta) {
    // Step direction taken by the proximal operator.
    Matrix tmp = prox.minus(W);
    double evalW = eval(W);
    double evalProx = eval(prox);
    // <grad, prox - W>, computed as the sum of entries of grad^T (prox - W).
    Matrix fastdotGradTmp = CFMatrixUtils.fastdot(grad.transpose(), tmp);
    double normGradProx = CFMatrixUtils.sum(fastdotGradTmp);
    // NOTE(review): this uses 0.5 * eta * ||tmp||_F — the Frobenius norm
    // is NOT squared and eta multiplies rather than divides. The common
    // backtracking condition is f(W) + <g, d> + (1/(2*eta)) * ||d||^2;
    // confirm whether this deviation is intentional before changing it.
    double normTmp = 0.5 * eta * tmp.normFrobenius();
    // Sufficient-decrease test: accept the step when it holds.
    return (evalProx <= evalW + normGradProx + normTmp);
}