/**
 * Scales a matrix by a constant in-place, storing the result in the
 * input matrix itself.
 *
 * @param mat
 *            the matrix to scale; modified in place
 * @param etat
 *            the constant to multiply by
 * @return the same (now scaled) input matrix, for call chaining
 */
public static Matrix timesInplace(Matrix mat, double etat) {
    // Delegate to the in-place scale; no copy is made.
    mat.scaleEquals(etat);
    return mat;
}
/**
 * Computes the covariance of the distribution, which involves inverting
 * the precision matrix.
 *
 * @return covariance of the distribution: the inverse of the precision
 *         matrix scaled by dof / (dof - 2).
 */
public Matrix getCovariance() {
    // Invert the precision first, then apply the Student-t scale factor.
    final Matrix covariance = this.getPrecision().inverse();
    covariance.scaleEquals(
        this.degreesOfFreedom / (this.degreesOfFreedom - 2.0));
    return covariance;
}
/**
 * Computes the covariance of the distribution, which involves inverting
 * the precision matrix.
 *
 * @return covariance of the distribution: inverse precision times
 *         dof / (dof - 2).
 */
public Matrix getCovariance() {
    final double factor =
        this.degreesOfFreedom / (this.degreesOfFreedom - 2.0);
    // Inverse of the precision matrix, scaled in place.
    final Matrix result = this.getPrecision().inverse();
    result.scaleEquals(factor);
    return result;
}
/**
 * Computes the covariance of the distribution, which involves inverting
 * the precision matrix.
 *
 * @return covariance matrix, equal to the inverted precision matrix
 *         multiplied by dof / (dof - 2).
 */
public Matrix getCovariance() {
    final Matrix inverted = this.getPrecision().inverse();
    final double dof = this.degreesOfFreedom;
    // In-place scale avoids allocating a second matrix.
    inverted.scaleEquals(dof / (dof - 2.0));
    return inverted;
}
/**
 * Prepares learner state for the next training round: lazily initializes
 * the parameter matrices, dampens (shrinks toward zero) the current
 * parameters, and installs the expanded label matrix into the loss.
 *
 * @param X feature matrix (rows = features/words, columns = users)
 * @param Y label matrix (columns = tasks)
 */
private void prepareNextRound(Matrix X, Matrix Y) {
    final int nfeatures = X.getNumRows();
    final int nusers = X.getNumColumns();
    final int ntasks = Y.getNumColumns();
    // int ninstances = Y.getNumRows(); // Assume 1 instance!

    // only inits when the current params is null
    if (this.w == null){
        initParams(X,Y,nfeatures, nusers, ntasks); // Number of words, users and tasks
    }
    // NOTE(review): `dampening` is a boxed Double that is unboxed on the
    // next line; a missing DAMPENING parameter (null) would throw a
    // NullPointerException here — confirm the parameter has a default.
    final Double dampening = this.params.getTyped(BilinearLearnerParameters.DAMPENING);
    final double weighting = 1.0 - dampening ;
    logger.debug("... dampening w, u and bias by: " + weighting);

    // Adjust for weighting: shrink all current parameters in place
    // before the next optimization round.
    this.w.scaleEquals(weighting);
    this.u.scaleEquals(weighting);
    if(this.biasMode){
        this.bias.scaleEquals(weighting);
    }
    // First expand Y s.t. blocks of rows contain the task values for each row of Y.
    // This means Yexp has (n * t x t)
    final SparseMatrix Yexp = expandY(Y);
    loss.setY(Yexp);
}
if( sequenceWeight != 1.0 ) A.scaleEquals(sequenceWeight);
/**
 * Evaluates the log-density of the packed (mean, covariance) input.
 * Column 0 of the input holds the mean; columns 1..d hold the covariance.
 *
 * @param input d x (d+1) matrix packing the mean vector and covariance
 * @return sum of the Gaussian log-density of the mean and the
 *         inverse-Wishart log-density of the scaled covariance
 */
public double logEvaluate( Matrix input) {
    final int d = input.getNumRows();
    final Vector packedMean = input.getColumn(0);
    final Matrix packedCovariance = input.getSubMatrix(0, d - 1, 1, d);
    // Scale the extracted covariance in place before evaluation.
    packedCovariance.scaleEquals(1.0 / this.covarianceDivisor);
    final double logGaussian =
        this.gaussian.getProbabilityFunction().logEvaluate(packedMean);
    final double logInverseWishart =
        this.inverseWishart.getProbabilityFunction().logEvaluate(packedCovariance);
    return logGaussian + logInverseWishart;
}
/**
 * Computes the log-likelihood of a packed (mean, covariance) sample.
 * The first column is the mean; the remaining d columns are the covariance.
 *
 * @param input d x (d+1) packed matrix
 * @return Gaussian log-density of the mean plus inverse-Wishart
 *         log-density of the covariance divided by covarianceDivisor
 */
public double logEvaluate( Matrix input) {
    final int dim = input.getNumRows();
    final Vector mu = input.getColumn(0);
    // Extract and rescale the covariance portion in place.
    final Matrix sigma = input.getSubMatrix(0, dim - 1, 1, dim);
    sigma.scaleEquals(1.0 / this.covarianceDivisor);
    return this.gaussian.getProbabilityFunction().logEvaluate(mu)
        + this.inverseWishart.getProbabilityFunction().logEvaluate(sigma);
}
/**
 * Log-density of a packed sample: column 0 carries the mean vector and
 * columns 1 through d carry the covariance matrix.
 *
 * @param input the packed d x (d+1) input matrix
 * @return sum of the component log-densities
 */
public double logEvaluate( Matrix input) {
    final int rows = input.getNumRows();
    // Split the packed input into its mean and covariance parts.
    final Vector meanPart = input.getColumn(0);
    final Matrix covariancePart = input.getSubMatrix(0, rows - 1, 1, rows);
    covariancePart.scaleEquals(1.0 / this.covarianceDivisor);
    final double meanLogDensity =
        this.gaussian.getProbabilityFunction().logEvaluate(meanPart);
    final double covarianceLogDensity =
        this.inverseWishart.getProbabilityFunction().logEvaluate(covariancePart);
    return meanLogDensity + covarianceLogDensity;
}
/**
 * Draws samples from the normal-inverse-Wishart pair, packing each draw
 * into a single d x (d+1) matrix: column 0 is the sampled mean and
 * columns 1..d are the sampled covariance.
 *
 * @param random source of randomness
 * @param sampleCount number of packed samples to draw
 * @param output collection receiving the packed sample matrices
 */
@Override
public void sampleInto(
    final Random random,
    final int sampleCount,
    final Collection<? super Matrix> output)
{
    final int d = this.gaussian.getInputDimensionality();
    // Draw the covariance samples from the inverse-Wishart component.
    ArrayList<Matrix> covariances =
        this.inverseWishart.sample(random, sampleCount);
    for( Matrix covariance : covariances )
    {
        Matrix meanAndCovariance =
            MatrixFactory.getDefault().createMatrix(d,d+1);
        // The covariance is stored into the output BEFORE the in-place
        // scale below; the scale is applied only to sample the mean.
        // (Assumes setSubMatrix copies values — confirm upstream.)
        meanAndCovariance.setSubMatrix(0, 1, covariance);
        // Mean is drawn with covariance / covarianceDivisor.
        covariance.scaleEquals(1.0/this.covarianceDivisor);
        this.gaussian.setCovariance(covariance);
        Vector mean = this.gaussian.sample(random);
        meanAndCovariance.setColumn(0, mean);
        output.add( meanAndCovariance );
    }
}
/**
 * Samples packed (mean, covariance) matrices into the given collection.
 * Each sample is a d x (d+1) matrix: column 0 holds the sampled mean,
 * columns 1..d hold the sampled covariance.
 *
 * @param random source of randomness
 * @param sampleCount number of samples to draw
 * @param output receives the packed sample matrices
 */
@Override
public void sampleInto(
    final Random random,
    final int sampleCount,
    final Collection<? super Matrix> output)
{
    final int d = this.gaussian.getInputDimensionality();
    // Covariance draws come from the inverse-Wishart component.
    ArrayList<Matrix> covariances =
        this.inverseWishart.sample(random, sampleCount);
    for( Matrix covariance : covariances )
    {
        Matrix meanAndCovariance =
            MatrixFactory.getDefault().createMatrix(d,d+1);
        // Store the covariance first; the scale below mutates the
        // `covariance` object afterwards and is used only for drawing
        // the mean (assumes setSubMatrix copies — confirm upstream).
        meanAndCovariance.setSubMatrix(0, 1, covariance);
        covariance.scaleEquals(1.0/this.covarianceDivisor);
        this.gaussian.setCovariance(covariance);
        Vector mean = this.gaussian.sample(random);
        meanAndCovariance.setColumn(0, mean);
        output.add( meanAndCovariance );
    }
}
/**
 * Draws (mean, covariance) samples and appends each as a packed
 * d x (d+1) matrix: sampled mean in column 0, sampled covariance in
 * columns 1..d.
 *
 * @param random source of randomness
 * @param sampleCount how many packed samples to generate
 * @param output destination collection for the packed matrices
 */
@Override
public void sampleInto(
    final Random random,
    final int sampleCount,
    final Collection<? super Matrix> output)
{
    final int d = this.gaussian.getInputDimensionality();
    ArrayList<Matrix> covariances =
        this.inverseWishart.sample(random, sampleCount);
    for( Matrix covariance : covariances )
    {
        Matrix meanAndCovariance =
            MatrixFactory.getDefault().createMatrix(d,d+1);
        // Ordering matters: the covariance goes into the output before
        // the in-place scale; the scaled matrix is only used to set the
        // Gaussian from which the mean is drawn. (Assumes setSubMatrix
        // copies values rather than aliasing — confirm upstream.)
        meanAndCovariance.setSubMatrix(0, 1, covariance);
        covariance.scaleEquals(1.0/this.covarianceDivisor);
        this.gaussian.setCovariance(covariance);
        Vector mean = this.gaussian.sample(random);
        meanAndCovariance.setColumn(0, mean);
        output.add( meanAndCovariance );
    }
}
/**
 * Incorporates one observation into the running mean and the running sum
 * of squared differences (Welford-style online update). Statement order
 * is significant: `delta` uses the pre-update mean, `delta2` the
 * post-update mean.
 *
 * @param value the new observation vector
 */
@Override
public void update(
    Vector value)
{
    // We've added another value.
    this.count++;

    // Compute the difference between the value and the current mean.
    final int dim = value.getDimensionality();
    if (this.mean == null)
    {
        // Lazily initialize the mean to the zero vector on first update.
        this.mean = VectorFactory.getDefault().createVector(dim);
    }
    Vector delta = value.minus(this.mean);

    // Update the mean based on the difference between the value
    // and the mean along with the new count.
    this.mean.plusEquals(delta.scale(1.0 / this.count));

    // Update the squared differences from the mean, using the new
    // mean in the process.
    if (this.sumSquaredDifferences == null)
    {
        // Seed the scatter with defaultCovariance * I on first update.
        this.sumSquaredDifferences =
            MatrixFactory.getDefault().createIdentity(dim, dim);
        this.sumSquaredDifferences.scaleEquals(
            this.getDefaultCovariance());
    }
    // Rank-one update: outer product of old-mean and new-mean deltas.
    Vector delta2 = value.minus(this.mean);
    this.sumSquaredDifferences.plusEquals(delta.outerProduct(delta2));
}
/**
 * Online (Welford-style) update of the running mean and scatter matrix
 * with a single observation. The two deltas deliberately straddle the
 * mean update: `delta` is against the old mean, `delta2` the new one.
 *
 * @param value the observation to fold in
 */
@Override
public void update(
    Vector value)
{
    // We've added another value.
    this.count++;

    // Compute the difference between the value and the current mean.
    final int dim = value.getDimensionality();
    if (this.mean == null)
    {
        // First observation: start the mean at the zero vector.
        this.mean = VectorFactory.getDefault().createVector(dim);
    }
    Vector delta = value.minus(this.mean);

    // Update the mean based on the difference between the value
    // and the mean along with the new count.
    this.mean.plusEquals(delta.scale(1.0 / this.count));

    // Update the squared differences from the mean, using the new
    // mean in the process.
    if (this.sumSquaredDifferences == null)
    {
        // Initialize the scatter as defaultCovariance * identity.
        this.sumSquaredDifferences =
            MatrixFactory.getDefault().createIdentity(dim, dim);
        this.sumSquaredDifferences.scaleEquals(
            this.getDefaultCovariance());
    }
    // Accumulate the rank-one outer-product term.
    Vector delta2 = value.minus(this.mean);
    this.sumSquaredDifferences.plusEquals(delta.outerProduct(delta2));
}
/**
 * Adds one observation to the running statistics: increments the count,
 * updates the mean incrementally, and accumulates the scatter matrix via
 * a Welford-style rank-one update. Do not reorder: the correctness of
 * the scatter term depends on computing one delta before and one after
 * the mean update.
 *
 * @param value the new observation vector
 */
@Override
public void update(
    Vector value)
{
    // We've added another value.
    this.count++;

    // Compute the difference between the value and the current mean.
    final int dim = value.getDimensionality();
    if (this.mean == null)
    {
        // Lazily create a zero mean on the first observation.
        this.mean = VectorFactory.getDefault().createVector(dim);
    }
    Vector delta = value.minus(this.mean);

    // Update the mean based on the difference between the value
    // and the mean along with the new count.
    this.mean.plusEquals(delta.scale(1.0 / this.count));

    // Update the squared differences from the mean, using the new
    // mean in the process.
    if (this.sumSquaredDifferences == null)
    {
        // Start from defaultCovariance * I.
        this.sumSquaredDifferences =
            MatrixFactory.getDefault().createIdentity(dim, dim);
        this.sumSquaredDifferences.scaleEquals(
            this.getDefaultCovariance());
    }
    // delta (old mean) x delta2 (new mean) outer product.
    Vector delta2 = value.minus(this.mean);
    this.sumSquaredDifferences.plusEquals(delta.outerProduct(delta2));
}
/**
 * Conjugate Bayesian update of a Gaussian prior with known likelihood
 * precision: combines the prior precision with the data precision
 * (scaled by the sample count) to produce the posterior mean and
 * covariance, written back into {@code target}.
 *
 * @param target the Gaussian prior, updated in place to the posterior
 * @param data the observed vectors
 */
@Override
public void update(
    MultivariateGaussian target,
    Iterable<? extends Vector> data)
{
    int N = CollectionUtil.size(data);
    // Prior precision, and a clone of the known likelihood precision.
    Matrix Ci0 = target.getCovarianceInverse();
    Matrix CiN = this.getKnownCovarianceInverse().clone();
    if( N > 1 )
    {
        // Scale by the sample count; for N == 1 the scale is a no-op.
        CiN.scaleEquals(N);
    }
    Vector sampleMean = MultivariateStatisticsUtil.computeMean(data);
    // Precision-weighted combination of prior mean and sample mean.
    Vector t0 = Ci0.times( target.getMean() );
    t0.plusEquals( CiN.times( sampleMean ) );
    // Saving another Matrix creation here... just make sure the
    // "t0" stuff gets completed first
    CiN.plusEquals(Ci0);
    // Posterior covariance = inverse of summed precisions; posterior
    // mean = posterior covariance times the weighted sum above.
    Matrix updatedCovariance = CiN.inverse();
    Vector updatedMean = updatedCovariance.times( t0 );
    target.setMean(updatedMean);
    target.setCovariance(updatedCovariance);
}
/**
 * Updates a Gaussian prior to its posterior under a known-precision
 * likelihood. The posterior precision is the prior precision plus N
 * times the known precision; the posterior mean is precision-weighted.
 * Note the deliberate reuse of {@code CiN} for both the scaled data
 * precision and, afterwards, the posterior precision.
 *
 * @param target the Gaussian to update in place
 * @param data the observations
 */
@Override
public void update(
    MultivariateGaussian target,
    Iterable<? extends Vector> data)
{
    int N = CollectionUtil.size(data);
    Matrix Ci0 = target.getCovarianceInverse();
    Matrix CiN = this.getKnownCovarianceInverse().clone();
    if( N > 1 )
    {
        // Scaling by 1 is a no-op, so it is skipped for N == 1.
        CiN.scaleEquals(N);
    }
    Vector sampleMean = MultivariateStatisticsUtil.computeMean(data);
    // Weighted sum: prior precision * prior mean + data precision * xbar.
    Vector t0 = Ci0.times( target.getMean() );
    t0.plusEquals( CiN.times( sampleMean ) );
    // Saving another Matrix creation here... just make sure the
    // "t0" stuff gets completed first
    CiN.plusEquals(Ci0);
    Matrix updatedCovariance = CiN.inverse();
    Vector updatedMean = updatedCovariance.times( t0 );
    target.setMean(updatedMean);
    target.setCovariance(updatedCovariance);
}
/**
 * In-place conjugate update of a Gaussian prior given data with a known
 * covariance: posterior precision = prior precision + N * known
 * precision, posterior mean = posterior covariance * (precision-weighted
 * means). Statement order is significant because {@code CiN} is mutated
 * and reused — see the inline comment.
 *
 * @param target the prior Gaussian, overwritten with the posterior
 * @param data the observed data vectors
 */
@Override
public void update(
    MultivariateGaussian target,
    Iterable<? extends Vector> data)
{
    int N = CollectionUtil.size(data);
    Matrix Ci0 = target.getCovarianceInverse();
    // Clone so the stored known precision is not mutated below.
    Matrix CiN = this.getKnownCovarianceInverse().clone();
    if( N > 1 )
    {
        CiN.scaleEquals(N);
    }
    Vector sampleMean = MultivariateStatisticsUtil.computeMean(data);
    Vector t0 = Ci0.times( target.getMean() );
    t0.plusEquals( CiN.times( sampleMean ) );
    // Saving another Matrix creation here... just make sure the
    // "t0" stuff gets completed first
    CiN.plusEquals(Ci0);
    Matrix updatedCovariance = CiN.inverse();
    Vector updatedMean = updatedCovariance.times( t0 );
    target.setMean(updatedMean);
    target.setCovariance(updatedCovariance);
}
this.sumSquaredDifferencesInverse.scaleEquals( this.getDefaultCovarianceInverse());
if( n > 1 ) betahat.scaleEquals(n);