/** * Computes p-value for 2-sided, 1-sample t-test. * * @param m sample mean * @param mu constant to test against * @param v sample variance * @param n sample n * @return p-value * @throws MaxCountExceededException if an error occurs computing the p-value * @throws MathIllegalArgumentException if n is not greater than 1 */ protected double tTest(final double m, final double mu, final double v, final double n) throws MaxCountExceededException, MathIllegalArgumentException { final double t = FastMath.abs(t(m, mu, v, n)); // pass a null rng to avoid unneeded overhead as we will not sample from this distribution final TDistribution distribution = new TDistribution(null, n - 1); return 2.0 * distribution.cumulativeProbability(-t); }
// Half-width of the (1 - alpha) confidence interval for the regression slope:
// the two-sided t critical value (simple regression leaves n - 2 residual
// degrees of freedom) times the slope's standard error.
// NOTE(review): fragment — the enclosing method signature and the definitions
// of n and alpha are not visible here.
TDistribution distribution = new TDistribution(n - 2); return getSlopeStdErr() * distribution.inverseCumulativeProbability(1d - alpha / 2d);
/**
 * {@inheritDoc}
 *
 * For degrees of freedom parameter {@code df}, the mean is
 * <ul>
 * <li>if {@code df > 1} then {@code 0},</li>
 * <li>else undefined ({@code Double.NaN}).</li>
 * </ul>
 */
public double getNumericalMean() {
    // The Student t mean exists (and equals zero) only when df exceeds 1.
    return getDegreesOfFreedom() > 1 ? 0 : Double.NaN;
}
/**
 * Generates a random value from the {@link TDistribution T Distribution}.
 *
 * @param df the degrees of freedom of the T distribution
 * @return random value from the T(df) distribution
 * @throws NotStrictlyPositiveException if {@code df <= 0}
 */
public double nextT(double df) throws NotStrictlyPositiveException {
    // One-shot distribution backed by this object's generator; draw a single deviate.
    final TDistribution tDist =
        new TDistribution(getRandomGenerator(), df, TDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
    return tDist.sample();
}
/**
 * Two-sided p-values for the coefficient z/t statistics returned by zValues().
 * Uses a t-distribution with the training residual degrees of freedom when the
 * dispersion was estimated, otherwise a standard normal distribution.
 *
 * @return array of p-values, one per coefficient
 */
public double[] pValues() {
    final double[] stats = zValues();
    final RealDistribution dist = _dispersionEstimated
        ? new TDistribution(_training_metrics.residual_degrees_of_freedom())
        : new NormalDistribution();
    for (int i = 0; i < stats.length; i++) {
        // p = 2 * P(X <= -|z|): two-sided tail probability.
        stats[i] = 2 * dist.cumulativeProbability(-Math.abs(stats[i]));
    }
    return stats;
}
// Per-class coefficient matrix — presumably one row per output class for
// multinomial models. NOTE(review): semantics inferred from the name; confirm.
double[][] _global_beta_multinomial;
// Per-coefficient inference for a least-squares fit:
//   residual df   = rows - (number of regressors + 1 for the intercept)
//   t statistic   = estimate / standard error
//   p-value       = 2 * P(T <= -|t|)      (two-sided)
//   CI half-width = std-error * t-critical at the (1 - alpha/2) quantile
// NOTE(review): fragment of a larger try block — the enclosing method, the
// catch clause, and the definitions of betaParam/regressor are not visible here.
try { final double residualDF = frame.rows().count() - (regressors.size() + 1); final TDistribution distribution = new TDistribution(residualDF); final double interceptParam = betaVector.getEntry(0); final double interceptStdError = intercept.data().getDouble(0, Field.STD_ERROR); final double interceptTStat = interceptParam / interceptStdError; final double interceptPValue = distribution.cumulativeProbability(-Math.abs(interceptTStat)) * 2d; final double interceptCI = interceptStdError * distribution.inverseCumulativeProbability(1d - alpha / 2d); this.intercept.data().setDouble(0, Field.PARAMETER, interceptParam); this.intercept.data().setDouble(0, Field.T_STAT, interceptTStat); final double betaStdError = betas.data().getDouble(regressor, Field.STD_ERROR); final double tStat = betaParam / betaStdError; final double pValue = distribution.cumulativeProbability(-Math.abs(tStat)) * 2d; final double betaCI = betaStdError * distribution.inverseCumulativeProbability(1d - alpha / 2d); this.betas.data().setDouble(regressor, Field.PARAMETER, betaParam); this.betas.data().setDouble(regressor, Field.T_STAT, tStat);
/**
 * Checks StudentTVertex#logPdf against the Apache Commons Math reference
 * implementation on a grid of t values for the given degrees of freedom.
 */
private void testLogPdfAtGivenDegreesOfFreedom(int v) {
    TDistribution apache = new TDistribution(v);
    StudentTVertex studentT = new StudentTVertex(v);
    // Sweep t over [-4.5, 4.5] in steps of 0.5 and compare log-densities.
    for (double t = -4.5; t <= 4.5; t += 0.5) {
        assertEquals(apache.logDensity(t), studentT.logPdf(t), DELTA);
    }
}
/**
 * @param param
 *            degrees of freedom
 * @return T-distribution
 */
protected TDistribution getTDistribution(double param) {
    // Reuse the cached distribution unless the degrees of freedom changed.
    final boolean stale = (t == null) || (t.getDegreesOfFreedom() != param);
    if (stale) {
        t = new TDistribution(param);
    }
    return t;
}
/** * 1-sample t-test confidence interval */ public double[] confidenceInterval(final double m, final double v, final double n, final double alpha) { checkSignificanceLevel(alpha); //_distribution.setDegreesOfFreedom(n - 1); final double t = Math.abs(_distribution.inverseCumulativeProbability(alpha / 2)); final double val = t * Math.sqrt(v / n); return new double[] { m - val, m + val }; }
// Two-sided p-value: 2 * (1 - F(|t|)) under the cached t-distribution.
// NOTE(review): the algebraically equivalent 2 * F(-|t|) is numerically more
// accurate for large |t|, where 1 - F(|t|) suffers cancellation — worth confirming
// whether extreme t values matter to callers before changing.
pValue = 2.0 * (1.0 - gettDistribution().cumulativeProbability(abs(t)));
/** {@inheritDoc} */
public double density(double x) {
    // Exponentiate the log form rather than duplicating the pdf formula.
    final double logPdf = logDensity(x);
    return FastMath.exp(logPdf);
}
// Residual degrees of freedom: number of observations minus estimated parameters.
int residualdf = regression.estimateResiduals().length-beta.length;
// Hoisted out of the loop: the t-distribution and the standard-error array do
// not depend on the coefficient index, so build/fetch them exactly once instead
// of per iteration (the original rebuilt the TDistribution every pass).
final TDistribution tDist = new TDistribution(residualdf);
final double[] stdErrors = regression.estimateRegressionParametersStandardErrors();
for (int i=0; i < beta.length; i++){
    // Two-sided p-value for H0: beta[i] == 0.
    double tstat = beta[i] / stdErrors[i];
    double pvalue = tDist.cumulativeProbability(-FastMath.abs(tstat))*2;
    System.out.println("p-value(" +i +") : " +pvalue );
}
// Fragment: margin of error and two-sided p-value from a t-distribution.
//   tCritical = F^-1((confLevel + 1) / 2)  — two-sided critical value
//   me        = tCritical * se             — margin of error
//   P         = 2 * F(-|t|)                — two-sided p-value
// NOTE(review): df is reassigned to n1 - 1 mid-fragment and the distribution
// rebuilt; the enclosing method is not visible, so the first df's origin and
// the roles of tail/adjustedPValue cannot be confirmed from here.
tDist = new TDistribution(df); tCritical = tDist.inverseCumulativeProbability( (getConfLevel() + 1d) / 2); me = tCritical * se; df = n1 - 1; tDist = new TDistribution(df); tCritical = tDist.inverseCumulativeProbability( (getConfLevel() + 1d) / 2); me = tCritical * se; P = 2.0 * tDist.cumulativeProbability(-Math.abs(t)); P = adjustedPValue(P, t, tail);
/**
 * @param df The degrees of freedom. See any stats book.
 * @return Ibid.
 */
public double nextT(double df) {
    // Draw one deviate from T(df) using this object's random generator.
    final TDistribution tDist = new TDistribution(randomGenerator, df);
    return tDist.sample();
}
// Verifies the StudentTVertex log-prob graph against the sum of per-element
// log-densities from Apache Commons Math for a t(1) distribution evaluated at
// the vector (-4.5, 4.5). The graph setup (feedValue calls) is order-dependent,
// so the statements are left exactly as written.
@Test public void logProbGraphMatchesKnownLogDensityOfVector() { IntegerVertex v = ConstantVertex.of(1, 1); StudentTVertex studentT = new StudentTVertex(v); LogProbGraph logProbGraph = studentT.logProbGraph(); LogProbGraphValueFeeder.feedValue(logProbGraph, v, v.getValue()); LogProbGraphValueFeeder.feedValue(logProbGraph, studentT, DoubleTensor.create(-4.5, 4.5)); TDistribution distribution = new TDistribution(1); double expectedDensity = distribution.logDensity(-4.5) + distribution.logDensity(4.5); LogProbGraphContract.matchesKnownLogDensity(logProbGraph, expectedDensity); }
/**
 * Evaluates the inverse cumulative probability (quantile) of the
 * t-distribution with degrees of freedom {@code a} at probability {@code b},
 * marking the result undefined when inputs are undefined or invalid.
 */
@Override
public final void compute() {
    // Guard clause: both inputs must be defined before evaluating.
    if (!input[0].isDefined() || !input[1].isDefined()) {
        num.setUndefined();
        return;
    }
    double param = a.getDouble();
    double val = b.getDouble();
    try {
        num.setValue(getTDistribution(param).inverseCumulativeProbability(val));
    } catch (Exception e) {
        // Invalid df or probability outside [0, 1]: result is undefined.
        num.setUndefined();
    }
}
/** {@inheritDoc} */
public double density(double x) {
    // Exponentiate the log form rather than duplicating the pdf formula.
    final double logPdf = logDensity(x);
    return Math.exp(logPdf);
}
/** * Computes p-value for 2-sided, 2-sample t-test, under the assumption * of equal subpopulation variances. * <p> * The sum of the sample sizes minus 2 is used as degrees of freedom.</p> * * @param m1 first sample mean * @param m2 second sample mean * @param v1 first sample variance * @param v2 second sample variance * @param n1 first sample n * @param n2 second sample n * @return p-value * @throws MaxCountExceededException if an error occurs computing the p-value * @throws NotStrictlyPositiveException if the estimated degrees of freedom is not * strictly positive */ protected double homoscedasticTTest(double m1, double m2, double v1, double v2, double n1, double n2) throws MaxCountExceededException, NotStrictlyPositiveException { final double t = FastMath.abs(homoscedasticT(m1, m2, v1, v2, n1, n2)); final double degreesOfFreedom = n1 + n2 - 2; // pass a null rng to avoid unneeded overhead as we will not sample from this distribution final TDistribution distribution = new TDistribution(null, degreesOfFreedom); return 2.0 * distribution.cumulativeProbability(-t); }
/**
 * Margin of error for a mean at the given confidence level.
 *
 * @param sd sample standard deviation
 * @param n sample size (degrees of freedom are n - 1)
 * @param confLevel confidence level in (0, 1)
 * @return t-critical value times the standard error of the mean
 */
private static double getMarginOfError(double sd, double n, double confLevel) throws ArithmeticException {
    // Two-sided critical value at the (confLevel + 1) / 2 quantile.
    TDistribution tDist = new TDistribution(n - 1);
    double tCritical = tDist.inverseCumulativeProbability((confLevel + 1d) / 2);
    // Margin of error: t * (sd / sqrt(n)).
    return tCritical * sd / Math.sqrt(n);
}
/**
 * Finds, by bisection, the smallest sample size whose two-sided confidence
 * interval width (2 * t / n) does not exceed the requested interval.
 *
 * @param interval target interval width
 * @param confidence significance level used for the critical value
 * @return minimum sample size (rounded up)
 */
private static int computeMinimum(final double interval, final double confidence) {
    final UnivariateFunction widthGap = new UnivariateFunction() {
        @Override
        public double value(final double n) {
            final double quantile = 1 - confidence / 2;
            final double t;
            if (Math.ceil(n) == FastMath.floor(n)) {
                // Integer n: evaluate the critical value directly.
                t = new TDistribution((int) n).inverseCumulativeProbability(quantile);
            } else {
                // Non-integer n: linearly interpolate the critical value
                // between the floor and ceiling degrees of freedom.
                final double upper = new TDistribution((int) FastMath.ceil(n))
                        .inverseCumulativeProbability(quantile) * (n - Math.floor(n));
                final double lower = new TDistribution((int) FastMath.floor(n))
                        .inverseCumulativeProbability(quantile) * (Math.ceil(n) - n);
                t = upper + lower;
            }
            // Positive while the interval 2t/n is still wider than requested.
            return 2 * t / n - interval;
        }
    };
    final BisectionSolver solver = new BisectionSolver();
    return (int) Math.ceil(solver.solve(Integer.MAX_VALUE, widthGap, 1, Integer.MAX_VALUE));
}