// Numerical-rank probe (fragment): at each step measure the Frobenius norm of the
// trailing (rank..end, rank..end) block of R; stop once it is exactly zero or its
// size relative to the previous block, scaled by rNorm, falls below dropThreshold.
// NOTE(review): fragment is cut off here — the loop body continues past this view.
double rNorm = lastNorm; while (rank < FastMath.min(rows, columns)) { double thisNorm = r.getSubMatrix(rank, rows - 1, rank, columns - 1).getFrobeniusNorm(); if (thisNorm == 0 || (thisNorm / lastNorm) * rNorm < dropThreshold) { break;
/**
 * Computes the variance-covariance matrix of the regression parameters,
 * Var(b) = (X<sup>T</sup>X)<sup>-1</sup>.
 *
 * <p>Uses the QR decomposition of the design matrix to reduce
 * (X<sup>T</sup>X)<sup>-1</sup> to (R<sup>T</sup>R)<sup>-1</sup>,
 * keeping only the leading p x p corner of R, where p is the length
 * of the beta vector.</p>
 *
 * <p>Model data must have been loaded with one of the
 * {@code newSampleData} methods before calling this; otherwise a
 * {@code NullPointerException} is thrown.</p>
 *
 * @return the beta variance-covariance matrix
 * @throws org.apache.commons.math3.linear.SingularMatrixException if the design matrix is singular
 * @throws NullPointerException if the data for the model have not been loaded
 */
@Override
protected RealMatrix calculateBetaVariance() {
    final int p = getX().getColumnDimension();
    // Leading p x p corner of R from the QR decomposition of X.
    final RealMatrix rTop = qr.getR().getSubMatrix(0, p - 1, 0, p - 1);
    // R^-1 (R^-1)^T = (R^T R)^-1 = (X^T X)^-1.
    final RealMatrix rInverse = new LUDecomposition(rTop).getSolver().getInverse();
    return rInverse.multiply(rInverse.transpose());
}
// Partition the n x n matrix m into a 2x2 block structure [ a b ; c d ] around
// splitIndex: a = top-left, b = top-right, c = bottom-left, d = bottom-right.
// NOTE(review): splitIndex1 is presumably splitIndex + 1 — defined outside this view.
final RealMatrix a = m.getSubMatrix(0, splitIndex, 0, splitIndex); final RealMatrix b = m.getSubMatrix(0, splitIndex, splitIndex1, n - 1); final RealMatrix c = m.getSubMatrix(splitIndex1, n - 1, 0, splitIndex); final RealMatrix d = m.getSubMatrix(splitIndex1, n - 1, splitIndex1, n - 1);
// Keep all rows but only the first maxDimensions columns of the components matrix
// (i.e. truncate to the leading maxDimensions components).
components = components.getSubMatrix(0, components.getRowDimension()-1, 0, maxDimensions-1);
/**
 * Extracts the rectangular sub-matrix spanning rows {@code i..j} and
 * columns {@code k..l} (inclusive) and wraps it in a new TetradMatrix.
 *
 * @param i first row index (inclusive)
 * @param j last row index (inclusive)
 * @param k first column index (inclusive)
 * @param l last column index (inclusive)
 * @return the selected sub-matrix as a TetradMatrix
 */
public TetradMatrix getPart(int i, int j, int k, int l) {
    final RealMatrix part = apacheData.getSubMatrix(i, j, k, l);
    return new TetradMatrix(part);
}
/**
 * Returns the sub-matrix formed by selecting the given rows and columns.
 * An empty selection on either axis yields a (possibly zero-sized)
 * matrix of the requested dimensions without touching the backing data.
 *
 * @param rows row indices to select
 * @param cols column indices to select
 * @return the selected sub-matrix as a TetradMatrix
 */
public TetradMatrix getSelection(int[] rows, int[] cols) {
    if (rows.length > 0 && cols.length > 0) {
        final RealMatrix selected = apacheData.getSubMatrix(rows, cols);
        return new TetradMatrix(selected, rows.length, cols.length);
    }
    return new TetradMatrix(rows.length, cols.length);
}
/** * Singular Value Decomposition (SVD) based naive scoring. */ private double computeScoreSVD(@Nonnull final RealMatrix H, @Nonnull final RealMatrix G) { SingularValueDecomposition svdH = new SingularValueDecomposition(H); RealMatrix UT = svdH.getUT(); SingularValueDecomposition svdG = new SingularValueDecomposition(G); RealMatrix Q = svdG.getU(); // find the largest singular value for the r principal components RealMatrix UTQ = UT.getSubMatrix(0, r - 1, 0, window - 1) .multiply(Q.getSubMatrix(0, window - 1, 0, r - 1)); SingularValueDecomposition svdUTQ = new SingularValueDecomposition(UTQ); double[] s = svdUTQ.getSingularValues(); return 1.d - s[0]; }
// Numerical-rank probe (fragment), identical in logic to the FastMath.min variant
// elsewhere in this file but using Math.min here — NOTE(review): consider unifying.
// Stops once the trailing block of R is zero or relatively below dropThreshold.
// Fragment is cut off here — the loop body continues past this view.
double rNorm = lastNorm; while (rank < Math.min(rows, columns)) { double thisNorm = r.getSubMatrix(rank, rows - 1, rank, columns - 1).getFrobeniusNorm(); if (thisNorm == 0 || (thisNorm / lastNorm) * rNorm < dropThreshold) { break;
@Override public double[] gradientAt(double[] param){ double[] sqrtPsi = new double[nVariables]; double[] invSqrtPsi = new double[nVariables]; for(int i=0;i<nVariables;i++){ param[i] = Math.max(0.005, param[i]);//ensure that no parameters are negative sqrtPsi[i] = Math.sqrt(param[i]); invSqrtPsi[i] = 1.0/Math.sqrt(param[i]); } DiagonalMatrix diagPsi = new DiagonalMatrix(param); DiagonalMatrix diagSqtPsi = new DiagonalMatrix(sqrtPsi); RealMatrix SC = new DiagonalMatrix(invSqrtPsi); RealMatrix Sstar = SC.multiply(R).multiply(SC); EigenDecomposition E = new EigenDecomposition(Sstar); RealMatrix L = E.getV().getSubMatrix(0,nVariables-1, 0, nFactors-1); double[] ev = new double[nFactors]; for(int i=0;i<nFactors;i++){ ev[i] = Math.sqrt(Math.max(E.getRealEigenvalue(i) - 1, 0)); } DiagonalMatrix M = new DiagonalMatrix(ev); RealMatrix LOAD = L.multiply(M); RealMatrix LL = diagSqtPsi.multiply(LOAD); RealMatrix G = LL.multiply(LL.transpose()).add(diagPsi).subtract(R); double[] gradient = new double[nVariables]; for(int i=0;i<nVariables;i++){ gradient[i] = G.getEntry(i,i)/(param[i]*param[i]); } return gradient; }
// Keep the leading nFactors eigenvectors (all nVariables rows) of the decomposition E,
// then collect per-factor weights into ev. NOTE(review): fragment — the loop body is
// cut off at the end of this view.
RealMatrix L = E.getV().getSubMatrix(0,nVariables-1, 0, nFactors-1); double[] ev = new double[nFactors]; for(int i=0;i<nFactors;i++){
// Duplicate of the eigenvector-truncation fragment: take the leading nFactors
// eigenvectors of E and prepare a per-factor weight array. NOTE(review): fragment —
// the loop body is cut off at the end of this view.
RealMatrix L = E.getV().getSubMatrix(0,nVariables-1, 0, nFactors-1); double[] ev = new double[nFactors]; for(int i=0;i<nFactors;i++){
public double valueAt(double[] param){ for(int i=0;i<nVariables;i++){ R.setEntry(i,i,1.0-param[i]); } EigenDecomposition eigen = new EigenDecomposition(R); RealMatrix eigenVectors = eigen.getV().getSubMatrix(0,nVariables-1, 0, nFactors-1); double[] ev = new double[nFactors]; for(int i=0;i<nFactors;i++){ ev[i] = Math.sqrt(eigen.getRealEigenvalue(i)); } DiagonalMatrix evMatrix = new DiagonalMatrix(ev);//USE Apache version of Diagonal matrix when upgrade to version 3.2 RealMatrix LAMBDA = eigenVectors.multiply(evMatrix); RealMatrix SIGMA = (LAMBDA.multiply(LAMBDA.transpose())); RealMatrix RESID = R.subtract(SIGMA); double sum = 0.0; for(int i=0;i<RESID.getRowDimension();i++){ for(int j=0;j<RESID.getColumnDimension();j++){ if(i!=j){ sum += Math.pow(RESID.getEntry(i,j),2); } } } return sum; }
// Numerical-rank probe (fragment), same as the earlier FastMath.min variant:
// stop once the trailing (rank..end) block of R is zero or relatively smaller
// than dropThreshold. NOTE(review): fragment is cut off here — the loop body
// continues past this view.
double rNorm = lastNorm; while (rank < FastMath.min(rows, columns)) { double thisNorm = r.getSubMatrix(rank, rows - 1, rank, columns - 1).getFrobeniusNorm(); if (thisNorm == 0 || (thisNorm / lastNorm) * rNorm < dropThreshold) { break;
// Reflection panel update (fragment): normalize the 2-entry column x of T, then
// apply the rank-1 reflection A <- A - 2 x (x^T A) to rows i..i+1 of R and of Qt.
// NOTE(review): fragment — the final argument of the last setSubMatrix call is
// cut off at the end of this view.
RealVector x = T.getSubMatrix(i, i + 1, i, i).getColumnVector(0); x = unitL2norm(x); RealMatrix subR = R.getSubMatrix(i, i + 1, 0, n - 1); R.setSubMatrix( subR.subtract(x.outerProduct(subR.preMultiply(x)).scalarMultiply(2)).getData(), i, 0); RealMatrix subQt = Qt.getSubMatrix(i, i + 1, 0, n - 1); Qt.setSubMatrix( subQt.subtract(x.outerProduct(subQt.preMultiply(x)).scalarMultiply(2)).getData(), i,
/**
 * Calculates Var(b) = (X<sup>T</sup>X)<sup>-1</sup>, the
 * variance-covariance matrix of the regression parameters.
 *
 * <p>The QR decomposition reduces (X<sup>T</sup>X)<sup>-1</sup> to
 * (R<sup>T</sup>R)<sup>-1</sup>; only the top p rows of R are used,
 * where p is the length of the beta vector.</p>
 *
 * <p>Data for the model must have been successfully loaded using one of
 * the {@code newSampleData} methods before invoking this method;
 * otherwise a {@code NullPointerException} is thrown.</p>
 *
 * @return the beta variance-covariance matrix
 * @throws org.apache.commons.math3.linear.SingularMatrixException if the design matrix is singular
 * @throws NullPointerException if the data for the model have not been loaded
 */
@Override
protected RealMatrix calculateBetaVariance() {
    final int numParams = getX().getColumnDimension();
    // Only the leading numParams x numParams corner of R matters.
    final RealMatrix rCorner = qr.getR().getSubMatrix(0, numParams - 1, 0, numParams - 1);
    final RealMatrix rInv = new LUDecomposition(rCorner).getSolver().getInverse();
    // R^-1 (R^-1)^T = (R^T R)^-1 = (X^T X)^-1.
    return rInv.multiply(rInv.transpose());
}
/**
 * Calculates the variance-covariance matrix of the regression parameters,
 * Var(b) = (X<sup>T</sup>X)<sup>-1</sup>, via QR: with X = QR this equals
 * (R<sup>T</sup>R)<sup>-1</sup> where only the top p rows of R are used
 * (p = length of the beta vector).
 *
 * <p>Data for the model must have been successfully loaded using one of
 * the {@code newSampleData} methods before invoking this method;
 * otherwise a {@code NullPointerException} is thrown.</p>
 *
 * @return the beta variance-covariance matrix
 * @throws org.apache.commons.math3.linear.SingularMatrixException if the design matrix is singular
 * @throws NullPointerException if the data for the model have not been loaded
 */
@Override
protected RealMatrix calculateBetaVariance() {
    final int betaLength = getX().getColumnDimension();
    final RealMatrix rSquare =
            qr.getR().getSubMatrix(0, betaLength - 1, 0, betaLength - 1);
    // Invert the triangular corner of R, then form (X^T X)^-1 = R^-1 R^-T.
    final RealMatrix inverseR = new LUDecomposition(rSquare).getSolver().getInverse();
    return inverseR.multiply(inverseR.transpose());
}
/** * Find the offset of the ellipsoid. * * @param a the algebraic from of the polynomial. * @return a vector containing the offset of the ellipsoid. */ private RealVector findCenter(RealMatrix a) { RealMatrix subA = a.getSubMatrix(0, 2, 0, 2); for (int q = 0; q < subA.getRowDimension(); q++) { for (int s = 0; s < subA.getColumnDimension(); s++) { subA.multiplyEntry(q, s, -1.0); } } RealVector subV = a.getRowVector(3).getSubVector(0, 3); // inv (dtd) DecompositionSolver solver = new SingularValueDecomposition(subA) .getSolver(); RealMatrix subAi = solver.getInverse(); return subAi.operate(subV); }
// Keep the leading nFactors eigenvectors (all nVariables rows) of the decomposition.
RealMatrix eigenVectors = eigen.getV().getSubMatrix(0,nVariables-1, 0, nFactors-1);
// Regression post-processing (fragment): record the residual standard error, wrap the
// residuals, and build cov(beta) = sigma^2 (R^T R)^-1 from the top p x p corner of R,
// inverting it via an LU solver. NOTE(review): surrounding method is outside this view.
this.stdError = Math.sqrt(errorVariance); this.residuals = createResidualsFrame(residuals); final RealMatrix rAug = decomposition.getR().getSubMatrix(0, p - 1, 0, p - 1); final RealMatrix rInv = new LUDecomposition(rAug).getSolver().getInverse(); final RealMatrix covMatrix = rInv.multiply(rInv.transpose()).scalarMultiply(errorVariance);
// Take the upper-left 3x3 block of r (getSubMatrix returns a new matrix, so
// later edits to subr do not touch r).
RealMatrix subr = r.getSubMatrix(0, 2, 0, 2);