/**
 * Probability density of the truncated distribution: zero outside the
 * truncation interval, otherwise the parent normal density rescaled by
 * the probability mass retained inside the interval ({@code reZ}).
 */
@Override
public double density(double x) {
    // Note: a NaN argument falls through to the parent density (NaN result),
    // matching the original condition ordering.
    return (x < lowerBound || x > upperBound)
            ? 0.0
            : unnormalized.density(x) / reZ;
}
// Scoring callback of an anonymous ScoringFunction instance (the trailing
// "};" closes that anonymous class, whose opening lies outside this view).
// NOTE(review): `d` and `distAmplitude` are captured from the enclosing
// scope — presumably a zero-mean Gaussian and its peak density; confirm
// against the enclosing factory method.
public double score(PlaneGeneral3D_F64 plane, Point3D_F64 point) {
    // Euclidean point-to-plane distance is the error term.
    double e = Distance3D_F64.distance(plane, point);
    // gaussian best for fitting, small 1/(e*e) component for large errors
    return d.density(e) + distAmplitude * 1e-12 / (1e-10 + (e * e));
} };
/**
 * Evaluates the probability density function of a normal distribution.
 *
 * @param mean  the mean of the normal distribution
 * @param sd    the standard deviation of the normal distribution
 * @param value the domain value at which the PDF is evaluated
 * @return the density of N(mean, sd&#178;) at {@code value}
 */
public double normalPdf(double mean, double sd, double value) {
    // A fresh distribution is built per call, sharing this object's RNG.
    NormalDistribution normal = new NormalDistribution(randomGenerator, mean, sd);
    return normal.density(value);
}
public static ScoringFunction<PlaneGeneral3D_F64, Point3D_F64> getGaussianSqauresMixedError(double sigma) { final NormalDistribution d = new NormalDistribution(0, sigma); final double distAmplitude = d.density(0); return new ScoringFunction<PlaneGeneral3D_F64, Point3D_F64>() { public double score(PlaneGeneral3D_F64 plane, Point3D_F64 point) { double e = Distance3D_F64.distance(plane, point); // gaussian best for fitting, small 1/(e*e) component for large errors return d.density(e) + distAmplitude * 1e-12 / (1e-10 + (e * e)); } }; }
public void setNormalPointsAndWeights(double mean, double sd){ points[0] = min; for(int i=1;i<numberOfPoints;i++){ points[i] = points[i-1]+step; } //compute weights normal = new NormalDistribution(mean, sd); weights = new double[numberOfPoints]; double densitySum = 0.0; for(int i=0;i<numberOfPoints;i++){ weights[i] = normal.density(points[i]); densitySum += weights[i]; } //make sure probabilities sum to unity for(int i=0;i<numberOfPoints;i++){ weights[i] = weights[i]/densitySum; } }
/**
 * Evaluates the probability density of the given Gaussian model at
 * {@code x}. Commons Math expects a standard deviation while the model
 * stores a variance, hence the square root.
 *
 * @param gaussianDistribution model supplying mean and variance
 * @param x                    evaluation point
 * @return density of the Gaussian at {@code x}
 */
static public double probability(GaussianDistribution gaussianDistribution, Number x) {
    double sd = Math.sqrt(gaussianDistribution.getVariance());
    NormalDistribution normal = new NormalDistribution(gaussianDistribution.getMean(), sd);
    return normal.density(x.doubleValue());
}
/**
 * Mean of a normal distribution N(m, s&#178;) truncated from below at
 * {@code lb}, computed via the inverse Mills ratio:
 * E[X | X &gt; lb] = m + s * phi(a) / (1 - Phi(a)), where a = (lb - m) / s.
 *
 * @param m  untruncated mean
 * @param s  untruncated standard deviation
 * @param lb lower truncation bound
 * @return mean of the lower-truncated distribution
 */
private static double computeEffectiveMean(double m, double s, double lb) {
    final NormalDistribution standard = new NormalDistribution();
    final double a = (lb - m) / s;
    // Inverse Mills ratio (hazard of the standard normal at a).
    final double millsRatio = standard.density(a) / (1 - standard.cumulativeProbability(a));
    return m + s * millsRatio;
}
// Shared standard-normal instance used by the static helpers below.
static NormalDistribution dist = new NormalDistribution();

/**
 * Probability density of N(mean, sd&#178;) at {@code x}. Standardizing a
 * normal variable requires the change-of-variable factor 1/sd:
 * f(x) = phi((x - mean) / sd) / sd. The previous version omitted the
 * division by sd, returning a function that does not integrate to one
 * for sd != 1.
 */
static double density(double x, double mean, double sd) {
    double scaledx = (x - mean) / sd;
    return dist.density(scaledx) / sd;
}

/**
 * Cumulative probability of N(mean, sd&#178;) at {@code x}. The CDF is
 * invariant under standardization, so no extra factor is needed here.
 */
static double cumulativeProbability(double x, double mean, double sd) {
    double scaledx = (x - mean) / sd;
    return dist.cumulativeProbability(scaledx);
}
/**
 * Ad-hoc comparison harness: prints the normal CDF from three
 * implementations (Apfloat-based, DistLib, Commons Math density) for
 * x = 0, -1, ..., -99.
 */
public static void main(String[] args) {
    System.out.println("Begin: RankSumPdf");
    for (double x = 0.0; x > -100; x -= 1.0) {
        Apfloat cdf = NormalDistribution.cdf(x, 0.0, 1.0);
        double cdfOri = DistLib.normal.cumulative(x, 0.0, 1.0);
        double commonsDensity = new org.apache.commons.math3.distribution.NormalDistribution(0.0, 1.0).density(x);
        Gpr.debug("x: " + x + "\tcdf: " + cdf + "\tcdfOri: " + cdfOri + "\t" + commonsDensity);
    }
    System.out.println("End: RankSumPdf");
}
/**
 * Polyserial correlation estimate:
 * psr = sqrt((n-1)/n) * sdY * r / sum(phi(tau_i)),
 * where the tau_i are the category thresholds.
 *
 * @return the polyserial correlation, or NaN when the threshold density
 *         sum is zero (estimate undefined)
 */
public double value() {
    double[] thresholds = getThresholds();
    double densitySum = 0.0;
    for (double threshold : thresholds) {
        densitySum += norm.density(threshold);
    }
    if (densitySum == 0.0) {
        return Double.NaN;
    }
    double n = (double) freqY.getSumFreq();
    return Math.sqrt((n - 1.0) / n) * sdY.getResult() * r.value() / densitySum;
}
public double value(double x){ double z = 0.0; double prbZ = 0.0; double loglik = 0.0; double tauStar = 0.0; double tauStarM1 = 0.0; double dif = 0.0; for(int i=0;i<dataX.length;i++){ z = (dataX[i]-meanX.getResult())-sdX.getResult(); prbZ = normal.density(z); tauStar = (alpha[dataY[i]-1] - x*z)/Math.sqrt(1 - x*x); if(dataY[i]>1){ tauStarM1 = (alpha[dataY[i]-1] - x*z)/Math.sqrt(1 - x*x); }else{ tauStarM1 = -10;//some large number greater than negative infinity } dif = normal.cumulativeProbability(tauStar) - normal.cumulativeProbability(tauStarM1); loglik += Math.log(prbZ*dif); } return -loglik; }
/**
 * Computes the log-likelihood of the response vector at a given ability.
 * Missing responses (coded -1) are skipped; item probabilities are
 * clamped to [0.00001, 0.99999] so the logarithm stays finite. Under MAP
 * estimation the log prior density at theta is added.
 *
 * @param theta examinee ability
 * @return log-likelihood, or NaN when there are no valid responses
 */
public double logLikelihood(double theta) {
    if (responseVector.getValidResponseCount() <= 0) {
        return Double.NaN;
    }
    double logLik = 0.0;
    for (int i = 0; i < responseVector.getNumberOfItems(); i++) {
        byte resp = responseVector.getResponseAt(i);
        if (resp == -1) {
            continue; // missing response — contributes nothing
        }
        double prob = irm[i].probability(theta, resp);
        // Clamp away from 0 and 1 to avoid log(0) / log of values past 1.
        prob = Math.min(Math.max(0.00001, prob), 0.99999);
        logLik += Math.log(prob);
    }
    if (method == EstimationMethod.MAP) {
        // MAP adds the log prior as a penalty term.
        logLik += Math.log(mapPrior.density(theta));
    }
    return logLik;
}
/** * Calculates the weighted mean, where the weight is based on the rank difference * from the median and normally distributed. The std-dev for the normal distribution * is set to X.length / 4, so about two-thirds of the weight is contained within * the middle half of all points. The std-dev is bottom capped at 3. * * @param X * @return */ public static double robustMean(double[] X) { X = ArrayUtils.clone(X); Arrays.sort(X); NormalDistribution dist = new NormalDistribution( X.length / 2, // heaviest weight at midpoint FastMath.max(3, X.length / 4)); // 66% of the weight within 3 pts on either side double sum = 0.0; double weight = 0.0; for (int i = 0; i < X.length; i++) { double d = dist.density(i); weight += d; sum += X[i] * d; } return sum / weight; }
/** * Calculates the weighted mean, where the weight is based on the rank difference * from the median and normally distributed. The std-dev for the normal distribution * is set to X.length / 4, so about two-thirds of the weight is contained within * the middle half of all points. The std-dev is bottom capped at 3. * * @param X * @return */ public static double robustMean(double[] X) { X = ArrayUtils.clone(X); Arrays.sort(X); NormalDistribution dist = new NormalDistribution( X.length / 2, // heaviest weight at midpoint FastMath.max(3, X.length / 4)); // 66% of the weight within 3 pts on either side double sum = 0.0; double weight = 0.0; for (int i = 0; i < X.length; i++) { double d = dist.density(i); weight += d; sum += X[i] * d; } return sum / weight; }
public TruncatedNormal(RandomGenerator rng, double mu, double sigma, double lowerBound, double upperBound) { super(rng); this.mu = mu; this.sigma = sigma; this.lowerBound = lowerBound; this.upperBound = upperBound; unnormalized = new NormalDistribution(mu, sigma); if (upperBound < lowerBound) { throw new IllegalArgumentException("upper bound must be no lower than lower bound"); } lowerZ = unnormalized.cumulativeProbability(lowerBound); upperZ = 1 - unnormalized.cumulativeProbability(upperBound); reZ = 1 - (lowerZ + upperZ); NormalDistribution standardNormal = new NormalDistribution(); double alpha = (lowerBound - mu) / sigma; double beta = (upperBound - mu) / sigma; double phiAlpha = standardNormal.density(alpha); double phiBeta = standardNormal.density(beta); double zPhiAlpha = standardNormal.cumulativeProbability(alpha); double zPhiBeta = standardNormal.cumulativeProbability(beta); double denom = (zPhiBeta - zPhiAlpha); double c = (phiBeta - phiAlpha) / denom; mean = mu - sigma * c; double d = 1 - c * c; if (phiBeta > 0.0) d -= beta * phiBeta / denom; if (phiAlpha > 0.0) d += alpha * phiAlpha / denom; variance = (sigma * sigma) * d; }
/**
 * Correct polyserial correlation for spuriousness due to including the
 * studied item score Y in the computation of X values. This method is
 * used for the polyserial correlation in an item analysis.
 *
 * @return correlation corrected for spuriousness, or NaN when the
 *         threshold density sum is zero (estimate undefined)
 */
public double spuriousCorrectedValue() {
    double correctedR = spuriousCorrectedPearsonCorrelation();
    double[] thresholds = getThresholds();
    double densitySum = 0.0;
    for (double threshold : thresholds) {
        densitySum += norm.density(threshold);
    }
    if (densitySum == 0.0) {
        return Double.NaN;
    }
    double n = (double) freqY.getSumFreq();
    // psr = sqrt((n-1)/n) * sdY * corrected r / sum(phi(tau_i))
    return Math.sqrt((n - 1.0) / n) * sdY.getResult() * correctedR / densitySum;
}
/** * Returns the overall probability that the observed posture is performing a pointing the gesture on the specified side. * * @param posture the observed posture. * @param right true, if the arm for which the probability is calculated is the right (not left) one. * @return The probability of the tracked person pointing with the specified arm calculated based on empirical data. */ public static double pointingProbability(final TrackedPosture3DFloat posture, final boolean right) { //Recommended threshold: 0.3/0.4 final double elbowAngle = getElbowAngle(posture, right); final double handHeightAngle = getHandHeightAngle(posture, right, false); final double heightFactor = 0.5 + 0.5 * Math.tanh((140 - handHeightAngle) / 20); final double expectedElbowAngle = handHeightAngle >= 60 ? 180 : (handHeightAngle - 20) * 0.75 + 150; final double extension_factor = (new NormalDistribution(expectedElbowAngle, 40)).density(elbowAngle) * 100; return Math.min(heightFactor * extension_factor, 1.0) * PostureFunctions.postureConfidence(posture, right); }
/**
 * Does the same as pointingProbability except using a different model
 * that led to a higher AUC for the training data, but does not offer
 * thresholds of similar feasibility.
 *
 * @param posture the observed posture.
 * @param right   true, if the arm for which the probability is calculated
 *                is the right (not left) one.
 * @return the probability of the tracked person pointing with the
 *         specified arm, calculated based on empirical data.
 */
private double pointingProbabilityHigherAUC(final TrackedPosture3DFloat posture, final boolean right) {
    final double elbowAngle = getElbowAngle(posture, right);
    final double handHeightAngle = getHandHeightAngle(posture, right, true);
    // Smooth tanh step centered at 107 deg down-weights raised hands.
    final double heightFactor = 0.5 + 0.5 * Math.tanh((107 - handHeightAngle) / 18);
    // Expected elbow extension: fully extended above 80 deg, otherwise
    // relaxing linearly with the hand height angle.
    final double expectedElbowAngle;
    if (handHeightAngle >= 80) {
        expectedElbowAngle = 180;
    } else {
        expectedElbowAngle = (handHeightAngle - 4) * 30 / 76 + 150;
    }
    // Score how close the observed elbow angle is to the expectation.
    final double extensionFactor = new NormalDistribution(expectedElbowAngle, 40).density(elbowAngle) * 100;
    return Math.min(heightFactor * extensionFactor, 1.0) * PostureFunctions.postureConfidence(posture, right);
}
}
/**
 * Generates a single-feature regression data set: each feature value is
 * drawn uniformly from [-1, 1] and its label is the standard normal
 * density at that value plus a sampled noise term.
 *
 * @return dense regression data set with one feature
 */
public RegDataSet univarNormal() {
    NormalDistribution standardNormal = new NormalDistribution(0, 1);
    RegDataSet dataSet = RegDataSetBuilder.getBuilder()
            .numDataPoints(numDataPoints)
            .numFeatures(1)
            .dense(true)
            .missingValue(false)
            .build();
    for (int i = 0; i < numDataPoints; i++) {
        double featureValue = Sampling.doubleUniform(-1, 1);
        // Label = pdf(feature) + noise.
        double label = standardNormal.density(featureValue) + noise.sample();
        dataSet.setFeatureValue(i, 0, featureValue);
        dataSet.setLabel(i, label);
    }
    return dataSet;
}
// Verifies that a TruncatedNormal with infinite bounds degenerates to the
// plain normal distribution: construction with upper < lower must throw,
// and for several sigmas the truncated density / CDF / interval
// probability must match commons-math's NormalDistribution, as must the
// static non-truncated helpers.
@Test public void testStandard() {
    // upper bound (0.0) below lower bound (1E-8) must be rejected
    assertTrue(TestUtils.checkThrows(() -> new TruncatedNormal(null, 0.0, 1.0, 1E-8, 0.0), IllegalArgumentException.class));
    double mu = 0.0;
    for (double sigma : Arrays.asList(1.0, 0.5, 3.0)) {
        NormalDistribution sn = new NormalDistribution(mu, sigma);
        TruncatedNormal tr = new TruncatedNormal(null, mu, sigma, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
        // support must be the whole real line, open at both ends
        assertTrue(tr.isSupportConnected());
        assertFalse(tr.isSupportLowerBoundInclusive());
        assertFalse(tr.isSupportUpperBoundInclusive());
        assertEquals(Double.NEGATIVE_INFINITY, tr.getSupportLowerBound(), tol);
        assertEquals(Double.POSITIVE_INFINITY, tr.getSupportUpperBound(), tol);
        for (double x : Arrays.asList(0.1, 0.5, 2.3, 0.0, -2.8, -10.0, -50.0, 50.0)) {
            // pointwise density and CDF must agree with the parent normal
            assertEquals(sn.density(x), tr.density(x), tol);
            assertEquals(sn.cumulativeProbability(x), tr.cumulativeProbability(x), tol);
            assertEquals(sn.density(x), TruncatedNormal.densityNonTrunc(x, mu, sigma), tol);
            assertEquals(sn.cumulativeProbability(x), TruncatedNormal.cumulativeNonTrunc(x, mu, sigma), tol);
            // interval probabilities, only for well-ordered pairs x <= y
            for (double y : Arrays.asList(x - 1.5, x - 0.5, x, x + 0.5, x + 1.5)) {
                if (y < x) continue;
                assertEquals(sn.probability(x, y), tr.probability(x, y), tol);
                assertEquals(sn.probability(x, y), TruncatedNormal.probabilityNonTrunc(x, y, mu, sigma), tol);
            }
        }
    }
}