/**
 * The M-step: re-estimates this Poisson component from the data and the
 * posterior probabilities computed in the E-step.
 *
 * @param x the samples.
 * @param posteriori the posterior probability of each sample belonging to
 *                   this component.
 * @return the updated component with its new prior and Poisson rate.
 */
@Override
public DiscreteMixture.Component M(int[] x, double[] posteriori) {
    // Accumulate the total posterior mass and the posterior-weighted sum.
    double weight = 0.0;
    double weightedSum = 0.0;
    for (int i = 0; i < x.length; i++) {
        weight += posteriori[i];
        weightedSum += x[i] * posteriori[i];
    }

    // The MLE of the Poisson rate is the posterior-weighted sample mean.
    DiscreteMixture.Component component = new DiscreteMixture.Component();
    component.priori = weight;
    component.distribution = new PoissonDistribution(weightedSum / weight);
    return component;
}
/**
 * Generates a random variate by summing the squares of {@code nu}
 * independent standard normal draws.
 *
 * @return a random sample.
 */
@Override
public double rand() {
    double sum = 0.0;
    for (int i = 0; i < nu; i++) {
        double z = GaussianDistribution.getInstance().rand();
        sum += z * z;
    }
    return sum;
}
/**
 * Constructor. The mixture model will be learned from the given data with the
 * EM algorithm, starting from the supplied initial guess.
 *
 * @param mixture the initial guess of mixture. Components may have
 *                different distribution forms.
 * @param data the training data.
 */
public ExponentialFamilyMixture(List<Component> mixture, double[] data) {
    this(mixture);
    // Refine the initial components in place via expectation-maximization.
    EM(components, data);
}
/**
 * Creates a Normal distribution with the specified mean and
 * standard deviation.
 *
 * @param mean the mean of the distribution.
 * @param stdev the standard deviation of the distribution.
 */
public Normal(final double mean, final double stdev) {
    this.mean = mean;
    this.stdev = stdev;
    this.dist = new GaussianDistribution(mean, stdev);
}
/**
 * Creates a Student's t distribution with the specified degrees of freedom.
 *
 * @param df the degrees of freedom for this distribution.
 */
public StudentsT(final int df) {
    this.df = df;
    this.dist = new TDistribution(df);
}
/**
 * Generates a random sample, lazily creating the rejection sampler
 * on the first call.
 *
 * NOTE(review): the null-check-then-assign lazy initialization is not
 * synchronized; concurrent first calls may each construct a sampler —
 * confirm this object is used from a single thread.
 */
@Override public double rand() { if (rng == null) { rng = new RejectionLogLogistic(); } return rng.rand(); }
/**
 * Constructor. The mixture model will be learned from the given data with the
 * EM algorithm, starting from the supplied initial guess.
 *
 * @param mixture the initial guess of mixture. Components may have
 *                different distribution forms.
 * @param data the training data.
 */
public DiscreteExponentialFamilyMixture(List<Component> mixture, int[] data) {
    this(mixture);
    // Refine the initial components in place via expectation-maximization.
    EM(components, data);
}
/**
 * Constructor. The mixture model will be learned from the given data with the
 * EM algorithm, starting from the supplied initial guess.
 *
 * @param mixture the initial guess of mixture. Components may have
 *                different distribution forms.
 * @param data the training data.
 */
public MultivariateExponentialFamilyMixture(List<Component> mixture, double[][] data) {
    this(mixture);
    // Refine the initial components in place via expectation-maximization.
    EM(components, data);
}
/**
 * Inversion of CDF by bisection numeric root finding of "cdf(x) = p"
 * for continuous distribution. The default epsilon is 1E-6.
 *
 * @param p the probability.
 * @param xmin the lower bound of the bisection search bracket.
 * @param xmax the upper bound of the bisection search bracket.
 * @return x such that cdf(x) = p, to within the default tolerance 1E-6.
 */
protected double quantile(double p, double xmin, double xmax) {
    return quantile(p, xmin, xmax, 1.0E-6);
}
public int rand() { // cases with only one possible result end here if (n == 0) { return addd; } int x = random(); // undo transformations return x * fak + addd; }
/**
 * The log likelihood of a sample set drawn from the distribution,
 * i.e. the sum of the per-sample log densities.
 */
@Override
public double logLikelihood(double[] x) {
    double sum = 0.0;
    for (int i = 0; i < x.length; i++) {
        sum += logp(x[i]);
    }
    return sum;
}
}
/**
 * The log likelihood of a sample set drawn from the distribution,
 * i.e. the sum of the per-sample log probability masses.
 *
 * @param x the samples.
 * @return the log likelihood of the sample set.
 */
public double logLikelihood(int[] x) {
    double L = 0.0;
    // Iterate as int so the int overload of logp is invoked directly,
    // rather than widening each sample to double and going through a
    // logp(double) overload (presumably with a floor/integrality check —
    // confirm against the class hierarchy).
    for (int xi : x) L += logp(xi);
    return L;
}
/**
 * The log likelihood of a multivariate sample set drawn from the
 * distribution, i.e. the sum of the per-sample log densities.
 */
@Override
public double logLikelihood(double[][] x) {
    double sum = 0.0;
    for (double[] sample : x) {
        sum += logp(sample);
    }
    return sum;
}
}
/**
 * Constructs a Normal distribution with the given parameters.
 *
 * @param mean the mean of the distribution.
 * @param stdev the standard deviation of the distribution.
 */
public Normal(final double mean, final double stdev) {
    this.mean = mean;
    this.stdev = stdev;
    this.dist = new GaussianDistribution(mean, stdev);
}
/**
 * Constructs a Student's t distribution with the given degrees of freedom.
 *
 * @param df the degrees of freedom for this distribution.
 */
public StudentsT(final int df) {
    this.df = df;
    this.dist = new TDistribution(df);
}
/**
 * Standard EM algorithm which iteratively alternates
 * Expectation and Maximization steps until convergence.
 * Delegates with the default regularization parameter gamma = 0.2.
 *
 * @param mixture the initial configuration.
 * @param x the input data.
 * @return log Likelihood
 */
double EM(List<Component> mixture, int[] x) { return EM(mixture, x, 0.2); }
/**
 * Standard EM algorithm which iteratively alternates
 * Expectation and Maximization steps until convergence.
 * Delegates with the default regularization parameter gamma = 0.2.
 *
 * @param mixture the initial configuration.
 * @param x the input data.
 * @return log Likelihood
 */
double EM(List<Component> mixture, double[][] x) { return EM(mixture, x, 0.2); }
/**
 * Standard EM algorithm which iteratively alternates
 * Expectation and Maximization steps until convergence.
 * Delegates with regularization parameter gamma = 0.0 (no regularization).
 *
 * NOTE(review): the int[] and double[][] overloads in sibling classes default
 * to gamma = 0.2 while this one uses 0.0 — confirm the asymmetry is intentional.
 *
 * @param mixture the initial configuration.
 * @param x the input data.
 * @return log Likelihood
 */
double EM(List<Component> mixture, double[] x) { return EM(mixture, x, 0.0); }
/**
 * Standard EM algorithm which iteratively alternates
 * Expectation and Maximization steps until convergence.
 * Delegates with an unbounded iteration cap (Integer.MAX_VALUE),
 * i.e. iteration is limited only by the convergence criterion.
 *
 * @param mixture the initial configuration.
 * @param x the input data.
 * @param gamma the regularization parameter.
 * @return log Likelihood
 */
double EM(List<Component> mixture, int[] x, double gamma) { return EM(mixture, x, gamma, Integer.MAX_VALUE); }
/**
 * Standard EM algorithm which iteratively alternates
 * Expectation and Maximization steps until convergence.
 * Delegates with an unbounded iteration cap (Integer.MAX_VALUE),
 * i.e. iteration is limited only by the convergence criterion.
 *
 * @param mixture the initial configuration.
 * @param x the input data.
 * @param gamma the regularization parameter.
 * @return log Likelihood
 */
double EM(List<Component> mixture, double[][] x, double gamma) { return EM(mixture, x, gamma, Integer.MAX_VALUE); }