/**
 * Get a probability estimator for a value.
 *
 * @param given the new value that data is conditional upon
 * @return the estimator for the supplied value given the condition
 */
public Estimator getEstimator(double given) {
  // Build a mixture: each component's probability of the given value,
  // weighted by that component's prior weight.
  Estimator mixture = new DiscreteEstimator(m_Estimators.length, false);
  int numComponents = m_Estimators.length;
  for (int idx = 0; idx < numComponents; idx++) {
    double weighted =
      m_Weights.getProbability(idx) * m_Estimators[idx].getProbability(given);
    mixture.addValue(idx, weighted);
  }
  return mixture;
}
/**
 * Get a probability estimator for a value.
 *
 * @param given the new value that data is conditional upon
 * @return the estimator for the supplied value given the condition
 */
public Estimator getEstimator(double given) {
  // One discrete outcome per component estimator; each outcome's mass is the
  // component prior times the component's likelihood of the given value.
  Estimator conditional = new DiscreteEstimator(m_Estimators.length, false);
  int component = 0;
  while (component < m_Estimators.length) {
    conditional.addValue(component,
      m_Weights.getProbability(component)
        * m_Estimators[component].getProbability(given));
    component++;
  }
  return conditional;
}
/** * Get a probability estimator for a value * * @param given the new value that data is conditional upon * @return the estimator for the supplied value given the condition */ public Estimator getEstimator(double given) { Estimator result = new DiscreteEstimator(m_Estimators.length,false); for(int i = 0; i < m_Estimators.length; i++) { //System.out.println("Val " + i // + " Weight:" + m_Weights.getProbability(i) // +" EstProb(" + given + ")=" // + m_Estimators[i].getProbability(given)); result.addValue(i, m_Weights.getProbability(i) * m_Estimators[i].getProbability(given)); } return result; }
/** * Get a probability estimator for a value * * @param given the new value that data is conditional upon * @return the estimator for the supplied value given the condition */ public Estimator getEstimator(double given) { Estimator result = new DiscreteEstimator(m_Estimators.length,false); for(int i = 0; i < m_Estimators.length; i++) { //System.out.println("Val " + i // + " Weight:" + m_Weights.getProbability(i) // +" EstProb(" + given + ")=" // + m_Estimators[i].getProbability(given)); result.addValue(i, m_Weights.getProbability(i) * m_Estimators[i].getProbability(given)); } return result; }
/**
 * Get a probability estimator for a value.
 *
 * @param given the new value that data is conditional upon
 * @return the estimator for the supplied value given the condition
 */
public Estimator getEstimator(double given) {
  Estimator conditional = new KernelEstimator(m_Precision);
  // No observations yet: return an empty kernel estimator.
  if (m_NumValues == 0) {
    return conditional;
  }
  for (int obs = 0; obs < m_NumValues; obs++) {
    // Probability mass of a precision-wide interval around the offset of the
    // stored conditioning value from the given value, under the kernel's
    // normal distribution.
    double offset = m_CondValues[obs] - given;
    double low = (offset - (m_Precision / 2)) / m_StandardDev;
    double high = (offset + (m_Precision / 2)) / m_StandardDev;
    double intervalProb =
      Statistics.normalProbability(high) - Statistics.normalProbability(low);
    conditional.addValue(m_Values[obs], intervalProb * m_Weights[obs]);
  }
  return conditional;
}
/** * Initialize the estimator with all values of one attribute of a dataset. * Some estimator might ignore the min and max values. * * @param data the dataset used to build this estimator * @param attrIndex attribute the estimator is for * @param min minimal border of range * @param max maximal border of range * @param factor number of instances has been reduced to that factor * @exception Exception if building of estimator goes wrong */ public void addValues(Instances data, int attrIndex, double min, double max, double factor) throws Exception { // no handling of factor, would have to be overridden // no handling of min and max, would have to be overridden int numInst = data.numInstances(); for (int i = 1; i < numInst; i++) { addValue(data.instance(i).value(attrIndex), 1.0); } }
/** * Initialize the estimator with all values of one attribute of a dataset. * Some estimator might ignore the min and max values. * * @param data the dataset used to build this estimator * @param attrIndex attribute the estimator is for * @param min minimal border of range * @param max maximal border of range * @param factor number of instances has been reduced to that factor * @exception Exception if building of estimator goes wrong */ public void addValues(Instances data, int attrIndex, double min, double max, double factor) throws Exception { // no handling of factor, would have to be overridden // no handling of min and max, would have to be overridden int numInst = data.numInstances(); for (int i = 1; i < numInst; i++) { addValue(data.instance(i).value(attrIndex), 1.0); } }
/**
 * Updates the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model
 * @exception Exception if the instance could not be incorporated in the
 *              model.
 */
public void updateClassifier(Instance instance) throws Exception {
  // Instances without a class label carry no information for the model.
  if (instance.classIsMissing()) {
    return;
  }
  Enumeration<Attribute> attEnum = m_Instances.enumerateAttributes();
  int attIndex = 0;
  while (attEnum.hasMoreElements()) {
    Attribute att = attEnum.nextElement();
    // Missing attribute values are simply skipped (naive Bayes convention).
    if (!instance.isMissing(att)) {
      m_Distributions[attIndex][(int) instance.classValue()]
        .addValue(instance.value(att), instance.weight());
    }
    attIndex++;
  }
  m_ClassDistribution.addValue(instance.classValue(), instance.weight());
}
/**
 * Updates the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model
 * @exception Exception if the instance could not be incorporated in the
 *              model.
 */
public void updateClassifier(Instance instance) throws Exception {
  if (instance.classIsMissing()) {
    // Nothing to learn from an unlabeled instance.
    return;
  }
  int attIndex = 0;
  for (Enumeration<Attribute> e = m_Instances.enumerateAttributes();
       e.hasMoreElements(); attIndex++) {
    Attribute att = e.nextElement();
    if (instance.isMissing(att)) {
      continue; // missing values contribute nothing to the per-class estimate
    }
    m_Distributions[attIndex][(int) instance.classValue()]
      .addValue(instance.value(att), instance.weight());
  }
  m_ClassDistribution.addValue(instance.classValue(), instance.weight());
}
/**
 * Get a probability estimator for a value.
 *
 * @param given the new value that data is conditional upon
 * @return the estimator for the supplied value given the condition
 */
public Estimator getEstimator(double given) {
  Estimator result = new KernelEstimator(m_Precision);
  if (m_NumValues == 0) {
    // Nothing observed so far: hand back the empty estimator.
    return result;
  }
  for (int i = 0; i < m_NumValues; i++) {
    double delta = m_CondValues[i] - given;
    // Integrate the kernel's normal density over one precision-wide bin
    // centred on delta.
    double zLower = (delta - (m_Precision / 2)) / m_StandardDev;
    double zUpper = (delta + (m_Precision / 2)) / m_StandardDev;
    double binProb =
      Statistics.normalProbability(zUpper) - Statistics.normalProbability(zLower);
    result.addValue(m_Values[i], binProb * m_Weights[i]);
  }
  return result;
}
if (m_theInstances.attribute(j).isNominal()) { for (int k = 0; k < m_theInstances.attribute(j).numValues(); k++) { m_model[i][j].addValue(k, m.m_taskModel[i][j].getCount(k));
if (m_theInstances.attribute(j).isNominal()) { for (int k = 0; k < m_theInstances.attribute(j).numValues(); k++) { m_model[i][j].addValue(k, m.m_taskModel[i][j].getCount(k));
/**
 * Updates the classifier with the given instance.
 *
 * @param bayesNet the bayes net to use
 * @param instance the new training instance to include in the model
 * @throws Exception if the instance could not be incorporated in the model.
 */
@Override
public void updateClassifier(BayesNet bayesNet, Instance instance)
  throws Exception {
  for (int att = 0; att < bayesNet.m_Instances.numAttributes(); att++) {
    // Encode the parent value combination as a single CPT row index using
    // mixed-radix positional encoding (double to avoid int overflow with
    // many parents).
    double rowIndex = 0;
    for (int p = 0; p < bayesNet.getParentSet(att).getNrOfParents(); p++) {
      int parentAtt = bayesNet.getParentSet(att).getParent(p);
      rowIndex =
        rowIndex * bayesNet.m_Instances.attribute(parentAtt).numValues()
          + instance.value(parentAtt);
    }
    bayesNet.m_Distributions[att][(int) rowIndex]
      .addValue(instance.value(att), instance.weight());
  }
} // updateClassifier
/**
 * Updates the classifier with the given instance.
 *
 * @param bayesNet the bayes net to use
 * @param instance the new training instance to include in the model
 * @throws Exception if the instance could not be incorporated in the model.
 */
@Override
public void updateClassifier(BayesNet bayesNet, Instance instance)
  throws Exception {
  int numAttributes = bayesNet.m_Instances.numAttributes();
  for (int iAttribute = 0; iAttribute < numAttributes; iAttribute++) {
    // Fold the values of all parents into one conditional-probability-table
    // row index (mixed-radix encoding; kept as double so large parent sets
    // do not overflow an int during accumulation).
    double iCPT = 0;
    int nrOfParents = bayesNet.getParentSet(iAttribute).getNrOfParents();
    for (int iParent = 0; iParent < nrOfParents; iParent++) {
      int nParent = bayesNet.getParentSet(iAttribute).getParent(iParent);
      iCPT = iCPT * bayesNet.m_Instances.attribute(nParent).numValues()
        + instance.value(nParent);
    }
    bayesNet.m_Distributions[iAttribute][(int) iCPT]
      .addValue(instance.value(iAttribute), instance.weight());
  }
} // updateClassifier
.numValues(), true); for (k = 0; k < inst.attribute(j).numValues(); k++) { m_model[i][j].addValue(k, nominalCounts[i][j][k]);
.numValues(), true); for (k = 0; k < inst.attribute(j).numValues(); k++) { m_model[i][j].addValue(k, nominalCounts[i][j][k]);
/** * The M step of the EM algorithm. * * @param inst the training instances * @throws Exception if something goes wrong */ private void M(Instances inst) throws Exception { int i, j, l; new_estimators(); estimate_priors(inst); // sum for (l = 0; l < inst.numInstances(); l++) { Instance in = inst.instance(l); for (i = 0; i < m_num_clusters; i++) { for (j = 0; j < m_num_attribs; j++) { if (inst.attribute(j).isNominal()) { m_model[i][j] .addValue(in.value(j), in.weight() * m_weights[l][i]); } else { m_modelNormal[i][j][0] += (in.value(j) * in.weight() * m_weights[l][i]); m_modelNormal[i][j][2] += in.weight() * m_weights[l][i]; m_modelNormal[i][j][1] += (in.value(j) * in.value(j) * in.weight() * m_weights[l][i]); } } } } // re-estimate Gaussian parameters M_reEstimate(inst); }
/** * The M step of the EM algorithm. * * @param inst the training instances * @throws Exception if something goes wrong */ private void M(Instances inst) throws Exception { int i, j, l; new_estimators(); estimate_priors(inst); // sum for (l = 0; l < inst.numInstances(); l++) { Instance in = inst.instance(l); for (i = 0; i < m_num_clusters; i++) { for (j = 0; j < m_num_attribs; j++) { if (inst.attribute(j).isNominal()) { m_model[i][j] .addValue(in.value(j), in.weight() * m_weights[l][i]); } else { m_modelNormal[i][j][0] += (in.value(j) * in.weight() * m_weights[l][i]); m_modelNormal[i][j][2] += in.weight() * m_weights[l][i]; m_modelNormal[i][j][1] += (in.value(j) * in.value(j) * in.weight() * m_weights[l][i]); } } } } // re-estimate Gaussian parameters M_reEstimate(inst); }