/**
 * Returns the relative frequency of the given class across all bags.
 *
 * @param classIndex the index of the class
 * @return the per-class count divided by the overall total, or 0 if the
 *         total weight is (numerically) zero
 */
public final double prob(int classIndex) {

  // Guard against division by zero when the distribution is empty.
  return Utils.eq(totaL, 0) ? 0 : m_perClass[classIndex] / totaL;
}
/**
 * Returns the relative frequency of the given class across all bags.
 *
 * @param classIndex the index of the class
 * @return the per-class count divided by the overall total, or 0 if the
 *         total weight is (numerically) zero
 */
public final double prob(int classIndex) {

  // An empty distribution yields probability 0 rather than dividing by zero.
  if (Utils.eq(totaL, 0)) {
    return 0;
  }
  return m_perClass[classIndex] / totaL;
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value, or 0 if the attribute's minimum is
 *         undefined or its range is (numerically) empty
 */
private double norm(double x, int i) {
  // A missing minimum or a zero-width range cannot be normalized; map to 0.
  if (Double.isNaN(m_Min[i]) || Utils.eq(m_Max[i], m_Min[i])) {
    return 0;
  } else {
    return (x - m_Min[i]) / (m_Max[i] - m_Min[i]);
  }
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value
 */
private double norm(double x, int i) {

  double min = m_Min[i];
  double max = m_Max[i];

  // An undefined minimum or a degenerate range cannot be normalized.
  if (Double.isNaN(min) || Utils.eq(max, min)) {
    return 0;
  }
  return (x - min) / (max - min);
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value, or 0 if the attribute's minimum is
 *         undefined or its range is (numerically) empty
 */
private double norm(double x, int i) {
  // A missing minimum or a zero-width range cannot be normalized; map to 0.
  if (Double.isNaN(m_Min[i]) || Utils.eq(m_Max[i], m_Min[i])) {
    return 0;
  } else {
    return (x - m_Min[i]) / (m_Max[i] - m_Min[i]);
  }
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value
 */
private double norm(double x, int i) {

  double lo = m_minArray[i];
  double hi = m_maxArray[i];

  // A missing minimum or an empty range maps everything to 0.
  if (Double.isNaN(lo) || Utils.eq(hi, lo)) {
    return 0;
  }
  return (x - lo) / (hi - lo);
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value
 */
protected double norm(double x, int i) {

  double min = m_Min[i];
  double max = m_Max[i];

  // Normalization is undefined for a missing minimum or a zero-width range.
  if (Double.isNaN(min) || Utils.eq(max, min)) {
    return 0;
  }
  return (x - min) / (max - min);
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value
 */
protected double norm(double x, int i) {

  double range = m_Max[i] - m_Min[i];

  // Fall back to 0 when the minimum is undefined or the range collapses.
  if (Double.isNaN(m_Min[i]) || Utils.eq(m_Max[i], m_Min[i])) {
    return 0;
  }
  return (x - m_Min[i]) / range;
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value
 */
private double norm(double x, int i) {

  double lo = m_minArray[i];
  double hi = m_maxArray[i];

  // A missing minimum or an empty range maps everything to 0.
  if (Double.isNaN(lo) || Utils.eq(hi, lo)) {
    return 0;
  }
  return (x - lo) / (hi - lo);
}
/**
 * Normalizes a given value of a numeric attribute.
 *
 * @param x the value to be normalized
 * @param i the attribute's index
 * @return the normalized value
 */
private double norm(double x, int i) {

  double lo = m_MinArray[i];
  double hi = m_MaxArray[i];

  // Normalization is only defined for a known, non-degenerate range.
  if (Double.isNaN(lo) || Utils.eq(hi, lo)) {
    return 0;
  }
  return (x - lo) / (hi - lo);
}
/** * CalcLogLoss. * @param R y * @param P p(y==1) * @param C limit * @return Log loss */ public static double calcLogLoss(double R, double P, double C) { // base 2 ? double ans = Math.min(Utils.eq(R,P) ? 0.0 : -( (R * Math.log(P)) + ((1.0 - R) * Math.log(1.0 - P)) ),C); return (Double.isNaN(ans) ? 0.0 : ans); }
/** * L_LogLoss - the log loss between real-valued confidence rpred and true prediction y. * @param y label * @param rpred prediction (confidence) * @param C limit (maximum loss of log(C)) * @return Log loss */ public static double L_LogLoss(double y, double rpred, double C) { if (y == -1) { return 0.0; } // base 2 ? double ans = Math.min(Utils.eq(y,rpred) ? 0.0 : -( (y * Math.log(rpred)) + ((1.0 - y) * Math.log(1.0 - rpred)) ),C); return (Double.isNaN(ans) ? 0.0 : ans); }
/** * L_LogLoss - the log loss between real-valued confidence rpred and true prediction y. * @param y label * @param rpred prediction (confidence) * @param C limit (maximum loss of log(C)) * @return Log loss */ public static double L_LogLoss(double y, double rpred, double C) { if (y == -1) { return 0.0; } // base 2 ? double ans = Math.min(Utils.eq(y,rpred) ? 0.0 : -( (y * Math.log(rpred)) + ((1.0 - y) * Math.log(1.0 - rpred)) ),C); return (Double.isNaN(ans) ? 0.0 : ans); }
@Override
Predicate.Eval evaluate(double[] input, double value, int fieldIndex) {
  // A missing field value yields the "unknown" evaluation; otherwise
  // the result is a numeric-equality test against the target value.
  double fieldValue = input[fieldIndex];
  boolean missing = Utils.isMissingValue(fieldValue);
  return Predicate.booleanToEval(missing, weka.core.Utils.eq(fieldValue, value));
}
/**
 * Prints this antecedent.
 *
 * @return a textual description of this antecedent
 */
@Override
public String toString() {
  // A stored value of 0 encodes the "<=" branch, anything else ">".
  String operator;
  if (Utils.eq(value, 0.0)) {
    operator = " <= ";
  } else {
    operator = " > ";
  }
  return att.name() + operator + Utils.doubleToString(splitPoint, 6);
}
@Override
Predicate.Eval evaluate(double[] input, double value, int fieldIndex) {
  // Missing values map to the "unknown" evaluation; present values are
  // compared for numeric equality with the target.
  double observed = input[fieldIndex];
  return Predicate.booleanToEval(
    Utils.isMissingValue(observed),
    weka.core.Utils.eq(observed, value));
}
/** * This method is a straightforward implementation of the information gain * criterion for the given distribution. */ @Override public final double splitCritValue(Distribution bags) { double numerator; numerator = oldEnt(bags) - newEnt(bags); // Splits with no gain are useless. if (Utils.eq(numerator, 0)) { return Double.MAX_VALUE; } // We take the reciprocal value because we want to minimize the // splitting criterion's value. return bags.total() / numerator; }
/**
 * Computes estimated errors for leaf.
 *
 * @param theDistribution the distribution to use
 * @return the estimated errors
 */
private double getEstimatedErrorsForDistribution(Distribution theDistribution) {

  double total = theDistribution.total();

  // An empty leaf contributes no estimated errors.
  if (Utils.eq(total, 0)) {
    return 0;
  }

  // Observed errors plus the pessimistic correction at confidence m_CF.
  double incorrect = theDistribution.numIncorrect();
  return incorrect + Stats.addErrs(total, incorrect, m_CF);
}
/**
 * Computes estimated errors for leaf.
 *
 * @param theDistribution the distribution to use
 * @return the estimated errors
 */
private double getEstimatedErrorsForDistribution(Distribution theDistribution) {

  double weight = theDistribution.total();

  // A leaf with no weight cannot contribute any errors.
  if (Utils.eq(weight, 0)) {
    return 0;
  }

  // Add the pessimistic error correction (confidence m_CF) to the
  // number of misclassified instances.
  double errors = theDistribution.numIncorrect();
  return errors + Stats.addErrs(weight, errors, m_CF);
}
public void testTypical() { Instances result = useFilter(); // Number of attributes and instances shouldn't change assertEquals(m_Instances.numAttributes(), result.numAttributes()); assertEquals(m_Instances.numInstances(), result.numInstances()); // Check conversion is OK for (int j = 0; j < result.numAttributes(); j++) { if (result.attribute(j).isNumeric()) { double mean = result.meanOrMode(j); assertTrue("Mean should be 0", Utils.eq(mean, 0)); double stdDev = Math.sqrt(result.variance(j)); assertTrue("StdDev should be 1 (or 0)", Utils.eq(stdDev, 0) || Utils.eq(stdDev, 1)); } } }