/** * Updates the model based on the current data. * Uses the leave-one-out Bootstrap to choose the number of components. */ protected void updateModel() { if (m_MixtureModel != null) { return; } else if (m_NumValues > 0) { // Shrink arrays if necessary if (m_Values.length > m_NumValues) { double[] values = new double[m_NumValues]; double[] weights = new double[m_NumValues]; System.arraycopy(m_Values, 0, values, 0, m_NumValues); System.arraycopy(m_Weights, 0, weights, 0, m_NumValues); m_Values = values; m_Weights = weights; } if (m_UseNormalizedEntropy) { m_MixtureModel = findModelUsingNormalizedEntropy(); } else { m_MixtureModel = buildModel(findNumComponentsUsingBootStrap(), m_Values, m_Weights); } } }
// NOTE(review): this span appears to be the interior of a model-selection
// method (likely findModelUsingNormalizedEntropy) with its enclosing guard
// conditions stripped during extraction: the two leading bare `return`
// statements are unreachable as written — presumably each was wrapped in an
// `if` (e.g. m_NumComponents > 0, and m_MaxNumComponents <= 1) — confirm
// against the original file. `loglikelihood` is also not declared in the
// visible span; presumably it is computed per candidate model inside the
// loop — confirm.
return buildModel(m_NumComponents, m_Values, m_Weights);
return buildModel(1, m_Values, m_Weights);
// Baseline: fit a single-component model; its log-likelihood anchors the
// denominator of the normalized entropy computed below.
MM bestMixtureModel = buildModel(1, m_Values, m_Weights);
double loglikelihoodForOneCluster = bestMixtureModel.loglikelihood(m_Values, m_Weights);
// Start from the worst possible score so any candidate can improve on it.
double bestNormalizedEntropy = 1;
// Evaluate mixtures of increasing size up to the configured maximum.
for (int i = 2; i <= m_MaxNumComponents; i++) {
  MM mixtureModel = buildModel(i, m_Values, m_Weights);
  double entropy = entropy(mixtureModel);
  // Normalized entropy: the mixture's entropy scaled by its log-likelihood
  // gain over the one-cluster baseline.
  double normalizedEntropy = entropy / (loglikelihood - loglikelihoodForOneCluster);
/**
 * Returns the current set of options.
 *
 * @return the current set of options as a string
 */
@Override
public String[] getOptions() {

  Vector<String> result = new Vector<String>();

  // Flag/value pairs for the numeric settings, in the established order.
  String[][] flagValuePairs = {
    { "-N", String.valueOf(getNumComponents()) },
    { "-M", String.valueOf(getMaxNumComponents()) },
    { "-S", String.valueOf(getSeed()) },
    { "-B", String.valueOf(getNumBootstrapRuns()) },
  };
  for (String[] pair : flagValuePairs) {
    result.add(pair[0]);
    result.add(pair[1]);
  }

  // -E is a boolean flag: emitted only when normalized entropy is enabled.
  if (m_UseNormalizedEntropy) {
    result.add("-E");
  }

  return result.toArray(new String[0]);
}
// NOTE(review): fragment of a command-line demo/test harness — the enclosing
// method (presumably main()) lies outside this span; `r`, `args`, `sum` and
// `covered` are declared there.
UnivariateMixtureEstimator e = new UnivariateMixtureEstimator();
// Pass a copy of args — presumably so option parsing can consume flags
// without mutating the caller's array; confirm setOptions' contract.
e.setOptions(Arrays.copyOf(args, args.length));
sum += Math.exp(e.logDensity(r.nextDouble() * 10.0 - 5.0));
// Two-cluster data: points around -1 (weight 1) and around +1 (weight 3).
e.addValue(r.nextGaussian() * 0.5 - 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 3);
sum += Math.exp(e.logDensity(r.nextDouble() * 10.0 - 5.0));
// Fresh estimator: same clusters, but the +1 cluster expressed as three
// unit-weight points instead of one point of weight 3.
e = new UnivariateMixtureEstimator();
e.setOptions(Arrays.copyOf(args, args.length));
e.addValue(r.nextGaussian() * 0.5 - 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 1);
sum += Math.exp(e.logDensity(r.nextDouble() * 10.0 - 5.0));
// Fresh estimator fed a single wide Gaussian; then query a 95% prediction
// interval and print its bounds.
e = new UnivariateMixtureEstimator();
e.setOptions(Arrays.copyOf(args, args.length));
e.addValue(r.nextGaussian() * 5.0 + 3.0 , 1);
double[][] intervals = e.predictIntervals(0.95);
System.out.println("Lower: " + intervals[0][0] + " Upper: " + intervals[0][1]);
// Coverage counter — presumably incremented in a loop beyond this span.
double covered = 0;
// Leave-one-out bootstrap: repeat resampling m_NumBootstrapRuns times;
// presumably each fitted model is then scored on out-of-bag values — the
// scoring code lies beyond this span. `i` (the candidate component count)
// is declared in an enclosing loop not visible here.
for (int k = 0; k < m_NumBootstrapRuns; k++) {
  // inBag[j] is presumably set by resampleWithWeights() for every value
  // drawn into the bootstrap sample — confirm against that method.
  boolean[] inBag = new boolean[m_NumValues];
  // output[0] = resampled values, output[1] = their weights — TODO confirm
  // against resampleWithWeights(), which is defined outside this span.
  double[][] output = resampleWithWeights(m_Random, inBag);
  // Fit an i-component mixture to the bootstrap sample.
  MM mixtureModel = buildModel(i, output[0], output[1]);
  // Accumulators for the (weighted) out-of-bag log-likelihood.
  double locLogLikelihood = 0;
  double totalWeight = 0;
// MM is a non-static inner class of UnivariateMixtureEstimator, hence the
// qualified `outer.new MM()` instance-creation syntax with a throwaway
// outer instance.
MM tempModel = new UnivariateMixtureEstimator().new MM();
// Initialize a K-component model from the given data using the shared RNG —
// TODO confirm initializeModel's contract; it is defined outside this span.
tempModel.initializeModel(K, values, weights, m_Random);
// NOTE(review): fragment of a command-line demo/test harness — the enclosing
// method (presumably main()) lies outside this span; `r`, `args`, `sum` and
// `covered` are declared there. (Duplicate of an earlier span in this file.)
UnivariateMixtureEstimator e = new UnivariateMixtureEstimator();
// Pass a copy of args — presumably so option parsing can consume flags
// without mutating the caller's array; confirm setOptions' contract.
e.setOptions(Arrays.copyOf(args, args.length));
sum += Math.exp(e.logDensity(r.nextDouble() * 10.0 - 5.0));
// Two-cluster data: points around -1 (weight 1) and around +1 (weight 3).
e.addValue(r.nextGaussian() * 0.5 - 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 3);
sum += Math.exp(e.logDensity(r.nextDouble() * 10.0 - 5.0));
// Fresh estimator: same clusters, but the +1 cluster expressed as three
// unit-weight points instead of one point of weight 3.
e = new UnivariateMixtureEstimator();
e.setOptions(Arrays.copyOf(args, args.length));
e.addValue(r.nextGaussian() * 0.5 - 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 1);
e.addValue(r.nextGaussian() * 0.5 + 1, 1);
sum += Math.exp(e.logDensity(r.nextDouble() * 10.0 - 5.0));
// Fresh estimator fed a single wide Gaussian; then query a 95% prediction
// interval and print its bounds.
e = new UnivariateMixtureEstimator();
e.setOptions(Arrays.copyOf(args, args.length));
e.addValue(r.nextGaussian() * 5.0 + 3.0 , 1);
double[][] intervals = e.predictIntervals(0.95);
System.out.println("Lower: " + intervals[0][0] + " Upper: " + intervals[0][1]);
// Coverage counter — presumably incremented in a loop beyond this span.
double covered = 0;
// Leave-one-out bootstrap: repeat resampling m_NumBootstrapRuns times;
// presumably each fitted model is then scored on out-of-bag values — the
// scoring code lies beyond this span. `i` (the candidate component count)
// is declared in an enclosing loop not visible here. (Duplicate of an
// earlier span in this file.)
for (int k = 0; k < m_NumBootstrapRuns; k++) {
  // inBag[j] is presumably set by resampleWithWeights() for every value
  // drawn into the bootstrap sample — confirm against that method.
  boolean[] inBag = new boolean[m_NumValues];
  // output[0] = resampled values, output[1] = their weights — TODO confirm
  // against resampleWithWeights(), which is defined outside this span.
  double[][] output = resampleWithWeights(m_Random, inBag);
  // Fit an i-component mixture to the bootstrap sample.
  MM mixtureModel = buildModel(i, output[0], output[1]);
  // Accumulators for the (weighted) out-of-bag log-likelihood.
  double locLogLikelihood = 0;
  double totalWeight = 0;
// MM is a non-static inner class of UnivariateMixtureEstimator, hence the
// qualified `outer.new MM()` instance-creation syntax with a throwaway
// outer instance. (Duplicate of an earlier span in this file.)
MM tempModel = new UnivariateMixtureEstimator().new MM();
// Initialize a K-component model from the given data using the shared RNG —
// TODO confirm initializeModel's contract; it is defined outside this span.
tempModel.initializeModel(K, values, weights, m_Random);
/**
 * Returns the current set of options.
 *
 * @return the current set of options as a string
 */
@Override
public String[] getOptions() {

  Vector<String> result = new Vector<String>();

  // Flag/value pairs for the numeric settings, in the established order.
  String[][] flagValuePairs = {
    { "-N", String.valueOf(getNumComponents()) },
    { "-M", String.valueOf(getMaxNumComponents()) },
    { "-S", String.valueOf(getSeed()) },
    { "-B", String.valueOf(getNumBootstrapRuns()) },
  };
  for (String[] pair : flagValuePairs) {
    result.add(pair[0]);
    result.add(pair[1]);
  }

  // -E is a boolean flag: emitted only when normalized entropy is enabled.
  if (m_UseNormalizedEntropy) {
    result.add("-E");
  }

  return result.toArray(new String[0]);
}
/** * Updates the model based on the current data. * Uses the leave-one-out Bootstrap to choose the number of components. */ protected void updateModel() { if (m_MixtureModel != null) { return; } else if (m_NumValues > 0) { // Shrink arrays if necessary if (m_Values.length > m_NumValues) { double[] values = new double[m_NumValues]; double[] weights = new double[m_NumValues]; System.arraycopy(m_Values, 0, values, 0, m_NumValues); System.arraycopy(m_Weights, 0, weights, 0, m_NumValues); m_Values = values; m_Weights = weights; } if (m_UseNormalizedEntropy) { m_MixtureModel = findModelUsingNormalizedEntropy(); } else { m_MixtureModel = buildModel(findNumComponentsUsingBootStrap(), m_Values, m_Weights); } } }
// NOTE(review): this span appears to be the interior of a model-selection
// method (likely findModelUsingNormalizedEntropy) with its enclosing guard
// conditions stripped during extraction: the two leading bare `return`
// statements are unreachable as written — presumably each was wrapped in an
// `if` (e.g. m_NumComponents > 0, and m_MaxNumComponents <= 1) — confirm
// against the original file. `loglikelihood` is also not declared in the
// visible span; presumably it is computed per candidate model inside the
// loop — confirm. (Duplicate of an earlier span in this file.)
return buildModel(m_NumComponents, m_Values, m_Weights);
return buildModel(1, m_Values, m_Weights);
// Baseline: fit a single-component model; its log-likelihood anchors the
// denominator of the normalized entropy computed below.
MM bestMixtureModel = buildModel(1, m_Values, m_Weights);
double loglikelihoodForOneCluster = bestMixtureModel.loglikelihood(m_Values, m_Weights);
// Start from the worst possible score so any candidate can improve on it.
double bestNormalizedEntropy = 1;
// Evaluate mixtures of increasing size up to the configured maximum.
for (int i = 2; i <= m_MaxNumComponents; i++) {
  MM mixtureModel = buildModel(i, m_Values, m_Weights);
  double entropy = entropy(mixtureModel);
  // Normalized entropy: the mixture's entropy scaled by its log-likelihood
  // gain over the one-cluster baseline.
  double normalizedEntropy = entropy / (loglikelihood - loglikelihoodForOneCluster);