break; double maxMinDist = smallestDistance(values[0]); furthestVal = values[0]; for (int i = 1; i < values.length; i++) { double minDist = smallestDistance(values[i]); if (minDist > maxMinDist) { maxMinDist = minDist; probs[nearestMean(values[i])][i] = 1.0; estimateParameters(values, weights, probs);
double leftVal = Math.exp(logDensity(min)); for (int i = 0; i < m_NumIntervals; i++) { double rightVal = Math.exp(logDensity(min + (i + 1) * delta)); probabilities[i] = 0.5 * (leftVal + rightVal) * delta; leftVal = rightVal;
/**
 * Computes the natural logarithm of the density for the given value.
 *
 * @param value the value at which to evaluate the density
 * @return the log of the mixture density estimate at the given value
 */
public double logDensity(double value) {

  // Log-sum-exp over the per-component joint log densities, shifted by
  // the maximum to avoid underflow when exponentiating.
  double[] logJoint = logJointDensities(value);
  double offset = logJoint[Utils.maxIndex(logJoint)];
  double total = 0.0;
  for (double lj : logJoint) {
    total += Math.exp(lj - offset);
  }
  return offset + Math.log(total);
}
double leftVal = Math.exp(logDensity(min)); for (int i = 0; i < m_NumIntervals; i++) { double rightVal = Math.exp(logDensity(min + (i + 1) * delta)); probabilities[i] = 0.5 * (leftVal + rightVal) * delta; leftVal = rightVal;
MM tempModel = new UnivariateMixtureEstimator().new MM(); tempModel.initializeModel(K, values, weights, m_Random); double MSE = tempModel.MSE(); probs[tempModel.nearestMean(values[i])][i] = 1.0; tempModel.estimateParameters(values, weights, probs); MSE = tempModel.MSE(); double loglikelihood = model.loglikelihood(values, weights); double[][] probs = new double[model.m_K][values.length]; while (Utils.gr(loglikelihood, oldLogLikelihood)){ double[] p = Utils.logs2probs(model.logJointDensities(values[i])); for (int j = 0; j < p.length; j++) { probs[j][i] = p[j]; model.estimateParameters(values, weights, probs); loglikelihood = model.loglikelihood(values, weights);
if (!inBag[j]) { double weight = m_Weights[j]; locLogLikelihood += weight * mixtureModel.logDensity(m_Values[j]); totalWeight += weight;
MM tempModel = new UnivariateMixtureEstimator().new MM(); tempModel.initializeModel(K, values, weights, m_Random); double MSE = tempModel.MSE(); probs[tempModel.nearestMean(values[i])][i] = 1.0; tempModel.estimateParameters(values, weights, probs); MSE = tempModel.MSE(); double loglikelihood = model.loglikelihood(values, weights); double[][] probs = new double[model.m_K][values.length]; while (Utils.gr(loglikelihood, oldLogLikelihood)){ double[] p = Utils.logs2probs(model.logJointDensities(values[i])); for (int j = 0; j < p.length; j++) { probs[j][i] = p[j]; model.estimateParameters(values, weights, probs); loglikelihood = model.loglikelihood(values, weights);
if (!inBag[j]) { double weight = m_Weights[j]; locLogLikelihood += weight * mixtureModel.logDensity(m_Values[j]); totalWeight += weight;
break; double maxMinDist = smallestDistance(values[0]); furthestVal = values[0]; for (int i = 1; i < values.length; i++) { double minDist = smallestDistance(values[i]); if (minDist > maxMinDist) { maxMinDist = minDist; probs[nearestMean(values[i])][i] = 1.0; estimateParameters(values, weights, probs);
double leftVal = Math.exp(logDensity(min)); for (int i = 0; i < m_NumIntervals; i++) { if (sum >= percentage) { return min + i * delta; double rightVal = Math.exp(logDensity(min + (i + 1) * delta)); sum += 0.5 * (leftVal + rightVal) * delta; leftVal = rightVal;
double loglikelihoodForOneCluster = bestMixtureModel.loglikelihood(m_Values, m_Weights); double bestNormalizedEntropy = 1; for (int i = 2; i <= m_MaxNumComponents; i++) { MM mixtureModel = buildModel(i, m_Values, m_Weights); double loglikelihood = mixtureModel.loglikelihood(m_Values, m_Weights); if (loglikelihood < loglikelihoodForOneCluster) {
double leftVal = Math.exp(logDensity(min)); for (int i = 0; i < m_NumIntervals; i++) { if (sum >= percentage) { return min + i * delta; double rightVal = Math.exp(logDensity(min + (i + 1) * delta)); sum += 0.5 * (leftVal + rightVal) * delta; leftVal = rightVal;
double loglikelihoodForOneCluster = bestMixtureModel.loglikelihood(m_Values, m_Weights); double bestNormalizedEntropy = 1; for (int i = 2; i <= m_MaxNumComponents; i++) { MM mixtureModel = buildModel(i, m_Values, m_Weights); double loglikelihood = mixtureModel.loglikelihood(m_Values, m_Weights); if (loglikelihood < loglikelihoodForOneCluster) {
/**
 * Returns the natural logarithm of the density estimate at the given
 * point.
 *
 * @param value the value at which to evaluate
 * @return the natural logarithm of the density estimate at the given
 * value
 */
public double logDensity(double value) {

  updateModel();

  // If no mixture model could be built yet, return the log of the
  // smallest positive double rather than negative infinity.
  return (m_MixtureModel == null)
    ? Math.log(Double.MIN_VALUE)
    : m_MixtureModel.logDensity(value);
}
/**
 * Returns the natural logarithm of the density estimate at the given
 * point.
 *
 * @param value the value at which to evaluate
 * @return the natural logarithm of the density estimate at the given
 * value
 */
public double logDensity(double value) {

  updateModel();

  if (m_MixtureModel != null) {
    return m_MixtureModel.logDensity(value);
  }

  // No model available yet: fall back to the log of the smallest
  // positive double instead of returning negative infinity.
  return Math.log(Double.MIN_VALUE);
}
/** * Calculates entrpy for given model and data. */ protected double entropy(MM mixtureModel) { double entropy = 0; for (int j = 0; j < m_NumValues; j++) { entropy += m_Weights[j] * ContingencyTables.entropy(Utils.logs2probs(mixtureModel.logJointDensities(m_Values[j]))); } entropy *= Utils.log2; // Need natural logarithm, not base-2 logarithm return entropy / (double)m_NumValues; }
/**
 * Computes the loglikelihood of the current model on the given data,
 * normalized by the total instance weight.
 *
 * @param values the data values
 * @param weights the corresponding instance weights
 * @return the weighted sum of log densities divided by the sum of weights
 */
public double loglikelihood(double[] values, double[] weights) {

  double weightedLogDensitySum = 0;
  double totalWeight = 0;
  for (int index = 0; index < values.length; index++) {
    double w = weights[index];
    weightedLogDensitySum += w * logDensity(values[index]);
    totalWeight += w;
  }
  return weightedLogDensitySum / totalWeight;
}
/**
 * Returns the prediction intervals for the given confidence value.
 *
 * @param conf the confidence value in the interval [0, 1]
 * @return the intervals
 */
public double[][] predictIntervals(double conf) {

  // Make sure the mixture model reflects the data seen so far.
  updateModel();

  // NOTE(review): no null check on m_MixtureModel here, unlike
  // logDensity() — presumably updateModel() guarantees a model by this
  // point; confirm against callers.
  return m_MixtureModel.predictIntervals(conf);
}
/**
 * Computes the loglikelihood of the current model for the supplied data,
 * as a weighted average of per-value log densities.
 *
 * @param values the data values
 * @param weights the corresponding instance weights
 * @return the weighted sum of log densities divided by the sum of weights
 */
public double loglikelihood(double[] values, double[] weights) {

  double numerator = 0;
  double denominator = 0;
  int n = values.length;
  int i = 0;
  while (i < n) {
    numerator += weights[i] * logDensity(values[i]);
    denominator += weights[i];
    ++i;
  }
  return numerator / denominator;
}
/** * Calculates entrpy for given model and data. */ protected double entropy(MM mixtureModel) { double entropy = 0; for (int j = 0; j < m_NumValues; j++) { entropy += m_Weights[j] * ContingencyTables.entropy(Utils.logs2probs(mixtureModel.logJointDensities(m_Values[j]))); } entropy *= Utils.log2; // Need natural logarithm, not base-2 logarithm return entropy / (double)m_NumValues; }