/** * Calculates the Poisson distribution function using a normal * approximation. The {@code N(mean, sqrt(mean))} distribution is used * to approximate the Poisson distribution. The computation uses * "half-correction" (evaluating the normal distribution function at * {@code x + 0.5}). * * @param x Upper bound, inclusive. * @return the distribution function value calculated using a normal * approximation. */ public double normalApproximateProbability(int x) { // calculate the probability using half-correction return normal.cumulativeProbability(x + 0.5); }
/**
 * Evaluates the standard normal N(0, 1) CDF at {@code x}.
 * Best-effort contract: any {@link IllegalArgumentException} raised by the
 * distribution is swallowed and reported as {@link Double#NaN} instead of
 * being propagated to the caller.
 *
 * @param x point at which the CDF is evaluated
 * @return P(Z <= x) for a standard normal Z, or NaN on failure
 */
private double cumulativeProbability(double x) {
    try {
        // A fresh standard-normal instance per call, as in the original.
        final NormalDistribution standardNormal = new NormalDistribution();
        return standardNormal.cumulativeProbability(x);
    } catch (IllegalArgumentException ignored) {
        // Signal failure with NaN rather than throwing.
        return Double.NaN;
    }
}
public final double linkInv(double x) { switch(_link) { // case multinomial: // should not be used case identity: return x; case ologlog: return 1.0-Math.exp(-1.0*Math.exp(x)); case oprobit: return _dprobit.cumulativeProbability(x); case ologit: case logit: return 1.0 / (Math.exp(-x) + 1.0); case log: return Math.exp(x); case inverse: double xx = (x < 0) ? Math.min(-1e-5, x) : Math.max(1e-5, x); return 1.0 / xx; case tweedie: return _tweedie_link_power == 0 ?Math.max(2e-16,Math.exp(x)) :Math.pow(x, 1/ _tweedie_link_power); default: throw new RuntimeException("unexpected link function id " + this); } }
/**
 * Applies the inverse of the configured GLM link function to {@code x},
 * mapping a linear-predictor value back to the mean scale.
 * NOTE(review): a sibling variant of this method uses {@code _tweedie_link_power};
 * this one uses {@code _link_power} — presumably intentional per class, but
 * worth confirming they are kept in sync.
 */
public final double linkInv(double x) {
    switch(_link) {
        // case multinomial: // should not be used
        case ologlog: return 1.0-Math.exp(-1.0*Math.exp(x)); // inverse complementary log-log
        case oprobit: return _dprobit.cumulativeProbability(x); // probit inverse: normal CDF
        case identity: return x;
        case ologit:
        case logit: return 1.0 / (Math.exp(-x) + 1.0); // logistic sigmoid
        case log: return Math.exp(x);
        case inverse:
            // Clamp away from zero to avoid division blow-up near x == 0.
            double xx = (x < 0) ? Math.min(-1e-5, x) : Math.max(1e-5, x);
            return 1.0 / xx;
        case tweedie:
            // Power 0 means a log link; the floor keeps the mean strictly positive.
            return _link_power == 0
                ?Math.max(2e-16,Math.exp(x))
                :Math.pow(x, 1/ _link_power);
        default: throw new RuntimeException("unexpected link function id " + _link);
    }
}
public final double variance(double mu){
/** * @param Wmin smallest Wilcoxon signed rank value * @param N number of subjects (corresponding to x.length) * @return two-sided asymptotic p-value */ private double calculateAsymptoticPValue(final double Wmin, final int N) { final double ES = (double) (N * (N + 1)) / 4.0; /* Same as (but saves computations): * final double VarW = ((double) (N * (N + 1) * (2*N + 1))) / 24; */ final double VarS = ES * ((double) (2 * N + 1) / 6.0); // - 0.5 is a continuity correction final double z = (Wmin - ES - 0.5) / FastMath.sqrt(VarS); // No try-catch or advertised exception because args are valid // pass a null rng to avoid unneeded overhead as we will not sample from this distribution final NormalDistribution standardNormal = new NormalDistribution(null, 0, 1); return 2*standardNormal.cumulativeProbability(z); }
/** * @param Umin smallest Mann-Whitney U value * @param n1 number of subjects in first sample * @param n2 number of subjects in second sample * @return two-sided asymptotic p-value * @throws ConvergenceException if the p-value can not be computed * due to a convergence error * @throws MaxCountExceededException if the maximum number of * iterations is exceeded */ private double calculateAsymptoticPValue(final double Umin, final int n1, final int n2) throws ConvergenceException, MaxCountExceededException { /* long multiplication to avoid overflow (double not used due to efficiency * and to avoid precision loss) */ final long n1n2prod = (long) n1 * n2; // http://en.wikipedia.org/wiki/Mann%E2%80%93Whitney_U#Normal_approximation final double EU = n1n2prod / 2.0; final double VarU = n1n2prod * (n1 + n2 + 1) / 12.0; final double z = (Umin - EU) / FastMath.sqrt(VarU); // No try-catch or advertised exception because args are valid // pass a null rng to avoid unneeded overhead as we will not sample from this distribution final NormalDistribution standardNormal = new NormalDistribution(null, 0, 1); return 2 * standardNormal.cumulativeProbability(z); }
/**
 * Evaluates the shared normal distribution's CDF at {@code x},
 * delegating to the singleton held by {@code NormalDistributionUtil}.
 *
 * @param x point at which the CDF is evaluated
 * @return the cumulative probability at {@code x}
 */
public static double cumulativeProbability(double x) {
    return NormalDistributionUtil.normalDistribution.cumulativeProbability(x);
}
/** * @param mean The mean of the normal to be used. * @param sd The standard deviation of the normal to be used. * @param value The domain value for the CDF. * @return Ibid. */ public double normalCdf(double mean, double sd, double value) { return normal.cumulativeProbability((value - mean) / sd); // value = (value - mean) / sd; // return ProbUtils.normalCdf(value); }
@Override public double cumulativeProbability(double x) { if (x < lowerBound) { return 0.0; } else if (x > upperBound) { return 1.0; } else { // the renormalized clipped cumulative return (unnormalized.cumulativeProbability(x) - lowerZ) / reZ; } }
/** * Calculates the Poisson distribution function using a normal * approximation. The {@code N(mean, sqrt(mean))} distribution is used * to approximate the Poisson distribution. The computation uses * "half-correction" (evaluating the normal distribution function at * {@code x + 0.5}). * * @param x Upper bound, inclusive. * @return the distribution function value calculated using a normal * approximation. */ public double normalApproximateProbability(int x) { // calculate the probability using half-correction return normal.cumulativeProbability(x + 0.5); }
/**
 * Checks the standard normal N(0, 1) distribution: each interval
 * probability over [lastx, nextx) must be a valid probability, and the
 * interval masses must telescope to the CDF of the right-most endpoint.
 * The original implementation only printed the values to stdout and
 * asserted nothing; it now fails with an {@link AssertionError} when a
 * computed probability is out of range or the running total drifts from
 * the CDF.
 *
 * @throws MathException if the distribution computation fails
 */
public void testNormalDist() throws MathException {
    DistributionFactory f = DistributionFactory.newInstance();
    NormalDistribution n = f.createNormalDistribution(0.0d, 1.0d);
    double lastx = Double.NEGATIVE_INFINITY;
    double nextx = Double.NEGATIVE_INFINITY;
    double total = 0.0;
    for (int i = -100; i < 100; i++) {
        nextx = i / 100d;
        final double p = n.cumulativeProbability(lastx, nextx);
        // Each slice of the partition must carry a valid probability mass.
        if (Double.isNaN(p) || p < 0.0 || p > 1.0) {
            throw new AssertionError("invalid interval probability " + p
                    + " on [" + lastx + ", " + nextx + "]");
        }
        total += p;
        lastx = nextx;
    }
    // The slices partition (-inf, 0.99], so their masses must sum to the CDF there.
    final double expected = n.cumulativeProbability(nextx);
    if (Math.abs(total - expected) > 1e-9) {
        throw new AssertionError("interval masses sum to " + total
                + " but CDF(" + nextx + ") = " + expected);
    }
}
/**
 * @return the probability associated with the most recently computed independence test.
 */
public double getPValue() {
    // Two-sided p-value: twice the standard-normal tail mass beyond |fisherZ|.
    final double upperTail = 1.0 - normal.cumulativeProbability(abs(fisherZ));
    return 2.0 * upperTail;
}
/**
 * @return the probability associated with the most recently computed independence test.
 */
public double getPValue() {
    // Two-sided p-value from the standard-normal tail beyond |fisherZ|.
    final double tailProbability = 1.0 - normal.cumulativeProbability(abs(fisherZ));
    return tailProbability * 2.0;
}
/**
 * Probability that a draw from N(mean, stddev) is at most zero,
 * i.e. the Gaussian CDF evaluated at 0.
 *
 * @param mean mean of the Gaussian
 * @param stddev standard deviation of the Gaussian
 * @return P(X <= 0) for X ~ N(mean, stddev)
 */
private double gaussianNegativeProbability(double mean, double stddev) {
    final NormalDistribution gaussian = new NormalDistribution(mean, stddev);
    return gaussian.cumulativeProbability(0.0);
}
/**
 * Computes a robust z-score and one-tailed p-value for {@code x} using the
 * median and interquartile range, storing the results in the {@code z} and
 * {@code pvalue} fields. When {@code iqr <= 0} the fields are left untouched.
 *
 * @param x observed value
 * @param median robust location estimate
 * @param iqr interquartile range (robust scale estimate)
 */
private void compute(double x, double median, double iqr) {
    final NormalDistribution standardNormal = new NormalDistribution();
    if (iqr > 0.0) {
        // 0.74 * IQR approximates sigma for a normal distribution
        // (sigma = IQR / 1.349), a robust alternative to the sample sd.
        z = (x - median) / (0.74 * iqr);
        // Tail probability on the side of the observed deviation.
        final double lowerTail = standardNormal.cumulativeProbability(z);
        pvalue = (z > 0.0) ? 1.0 - lowerTail : lowerTail;
    }
}
static protected double surrogateGaussP(DataSlice2D slice, float ixy, int binAlgo, int scount) { float zs = getSurrogateGaussDistribution(slice, ixy, binAlgo, scount); try { // Not so sure about getting the P-value from the statistic zs in this way... NormalDistribution normDist = new NormalDistribution(); return 1 - normDist.cumulativeProbability(zs); } catch (Exception ex) { return 1; } }
/**
 * Mean of a normal N(m, s) truncated below at {@code lb}, using the
 * inverse Mills ratio: m + s * phi(alpha) / (1 - Phi(alpha)) with
 * alpha = (lb - m) / s.
 *
 * @param m mean of the untruncated normal
 * @param s standard deviation of the untruncated normal
 * @param lb lower truncation bound
 * @return the mean of the lower-truncated distribution
 */
private static double computeEffectiveMean(double m, double s, double lb) {
    final NormalDistribution standardNormal = new NormalDistribution();
    final double alpha = (lb - m) / s;
    // Inverse Mills ratio lambda(alpha) = phi(alpha) / (1 - Phi(alpha)).
    final double millsRatio = standardNormal.density(alpha)
            / (1 - standardNormal.cumulativeProbability(alpha));
    return m + s * millsRatio;
}
/** Shared standard normal used for all scaled evaluations. */
static NormalDistribution dist = new NormalDistribution();

/**
 * Density of N(mean, sd) at {@code x}. The change of variables
 * z = (x - mean) / sd requires dividing the standard-normal density by
 * sd (the Jacobian factor); the original omitted that division, so the
 * returned density was overstated for sd > 1 and understated for sd < 1.
 *
 * @param x point at which the density is evaluated
 * @param mean mean of the distribution
 * @param sd standard deviation of the distribution
 * @return the probability density of N(mean, sd) at {@code x}
 */
static double density(double x, double mean, double sd) {
    double scaledx = (x - mean) / sd;
    // Jacobian 1/sd of the standardization transform.
    return dist.density(scaledx) / sd;
}

/**
 * CDF of N(mean, sd) at {@code x}. Standardizing the argument is
 * sufficient here — no Jacobian factor is needed for the CDF.
 *
 * @param x point at which the CDF is evaluated
 * @param mean mean of the distribution
 * @param sd standard deviation of the distribution
 * @return P(X <= x) for X ~ N(mean, sd)
 */
static double cumulativeProbability(double x, double mean, double sd) {
    double scaledx = (x - mean) / sd;
    return dist.cumulativeProbability(scaledx);
}
/**
 * One-sided p-value for the weighted test statistic over the two samples:
 * the statistic is folded onto the negative axis and the lower-tail
 * standard-normal probability is returned.
 *
 * @param N1 first sample
 * @param N2 second sample
 * @param weight per-observation weights
 * @return the lower-tail p-value for -|test statistic|
 */
public double p(int[] N1, int[] N2, double[] weight) {
    double stat = test(N1, N2, weight);
    // Fold positive statistics to the negative side.
    final double folded = (stat > 0) ? -stat : stat;
    return new org.apache.commons.math3.distribution.NormalDistribution(0, 1)
            .cumulativeProbability(folded);
}
public double getZScoreEquivalent(double zscore) { // compute zscore to CDF double cdf = (new NormalDistribution()).cumulativeProbability(zscore); // for normal distribution, mahalanobis distance is chi-squared // https://en.wikipedia.org/wiki/Mahalanobis_distance#Normal_distributions return (new ChiSquaredDistribution(p)).inverseCumulativeProbability(cdf); } }