protected void updateWeights(MultilabelClassifier h, Instances D) throws Exception { for(Instance x : D) { double w = 1.0; // weight (product of probability) double y[] = h.distributionForInstance(x); // convert ML distribution into probability vector, and multiply to w as we go .. for(int j = 0; j < y.length; j++) { w *= (y[j] < 0.5) ? 1. - y[j] : y[j]; } x.setWeight(w); } }
protected void updateWeights(ProblemTransformationMethod h, Instances D) throws Exception { for(Instance x : D) { double w = 1.0; // weight (product of probability) double y[] = h.distributionForInstance(x); // convert ML distribution into probability vector, and multiply to w as we go .. for(int j = 0; j < y.length; j++) { w *= (y[j] < 0.5) ? 1. - y[j] : y[j]; } x.setWeight(w); } }
protected void updateWeights(ProblemTransformationMethod h, Instances D) throws Exception { for(Instance x : D) { double w = 1.0; // weight (product of probability) double y[] = h.distributionForInstance(x); // convert ML distribution into probability vector, and multiply to w as we go .. for(int j = 0; j < y.length; j++) { w *= (y[j] < 0.5) ? 1. - y[j] : y[j]; } x.setWeight(w); } }
/**
 * Transforms a multi-label instance into a list of single-label instances,
 * one per annotating label, by copying the feature vector and giving each
 * copy a weight of 1/(list size) so the copies share one unit of mass.
 *
 * @param instance a multi-label instance
 * @return a list with the transformed, equally-weighted single-label instances
 */
@Override
List<Instance> transformInstance(Instance instance) {
	List<Instance> transformed = super.transformInstance(instance);
	// Split a single unit of weight evenly across all copies.
	double share = 1.0 / transformed.size();
	for (Instance single : transformed) {
		single.setWeight(share);
	}
	return transformed;
} }
@SuppressWarnings("unchecked")
private void weightTrainingInstances() {
	// First pass: count each class. A value of 0 at attribute index 3
	// (the "valid sense" attribute) marks a positive example.
	double positives = 0;
	double negatives = 0;
	Enumeration<Instance> en = dataset.enumerateInstances();
	while (en.hasMoreElements()) {
		Instance inst = (Instance) en.nextElement();
		if (inst.value(3) == 0) {
			positives++;
		} else {
			negatives++;
		}
	}
	// Second pass: re-weight so each class contributes 0.5 of the total mass.
	double p = positives / (positives + negatives);
	en = dataset.enumerateInstances();
	while (en.hasMoreElements()) {
		Instance inst = (Instance) en.nextElement();
		if (inst.value(3) == 0) {
			inst.setWeight(0.5 * (1.0 / p));
		} else {
			inst.setWeight(0.5 * (1.0 / (1 - p)));
		}
	}
}
@SuppressWarnings("unchecked")
private void weightTrainingInstances() {
	// Pass 1: tally positives (attribute 3 == 0) vs. negatives.
	double numPositive = 0;
	double numNegative = 0;
	Enumeration<Instance> scan = dataset.enumerateInstances();
	while (scan.hasMoreElements()) {
		Instance current = (Instance) scan.nextElement();
		double isValidSense = current.value(3);
		if (isValidSense == 0) {
			numPositive++;
		} else {
			numNegative++;
		}
	}
	double p = numPositive / (numPositive + numNegative);
	// Pass 2: balance the classes so each holds half the total weight mass.
	scan = dataset.enumerateInstances();
	while (scan.hasMoreElements()) {
		Instance current = (Instance) scan.nextElement();
		boolean positive = current.value(3) == 0;
		current.setWeight(positive ? 0.5 * (1.0 / p) : 0.5 * (1.0 / (1 - p)));
	}
}
@SuppressWarnings("unchecked")
private void weightTrainingInstances() {
	// Count class membership: attribute 3 equal to 0 marks a positive example.
	double posCount = 0;
	double negCount = 0;
	Enumeration<Instance> it = dataset.enumerateInstances();
	while (it.hasMoreElements()) {
		Instance x = (Instance) it.nextElement();
		if (x.value(3) == 0) {
			posCount++;
		} else {
			negCount++;
		}
	}
	// Fraction of positives; used to equalize the two classes' total weight.
	double p = posCount / (posCount + negCount);
	it = dataset.enumerateInstances();
	while (it.hasMoreElements()) {
		Instance x = (Instance) it.nextElement();
		if (x.value(3) == 0) {
			x.setWeight(0.5 * (1.0 / p));
		} else {
			x.setWeight(0.5 * (1.0 / (1 - p)));
		}
	}
}
/** * Get the current sample as a set of weighted instances * * @return the current sample as a set of weighted instances * @throws Exception if we haven't seen any instances yet */ public Instances getSampleAsWeightedInstances() throws Exception { if (m_sample.size() == 0) { throw new Exception( "Can't get the sample as a set of weighted Instnaces because " + "we haven't seen any instances yet!"); } Instances insts = new Instances(m_sample.peek().m_instance.dataset(), m_sampleSize); for (InstanceHolder i : m_sample) { // copy here as we are setting the weight Instance toAdd = (Instance) i.m_instance.copy(); toAdd.setWeight(i.m_weight); insts.add(toAdd); } insts.compactify(); return insts; }
/** * Sets the weights for the next iteration. * * @param training the training instances * @param reweight the reweighting factor * @throws Exception if something goes wrong */ protected void setWeights(Instances training, double reweight) throws Exception { double oldSumOfWeights, newSumOfWeights; oldSumOfWeights = training.sumOfWeights(); Enumeration<Instance> enu = training.enumerateInstances(); while (enu.hasMoreElements()) { Instance instance = enu.nextElement(); if (!Utils.eq( m_Classifiers[m_NumIterationsPerformed].classifyInstance(instance), instance.classValue())) { instance.setWeight(instance.weight() * reweight); } } // Renormalize weights newSumOfWeights = training.sumOfWeights(); enu = training.enumerateInstances(); while (enu.hasMoreElements()) { Instance instance = enu.nextElement(); instance.setWeight(instance.weight() * oldSumOfWeights / newSumOfWeights); } }
@Override public void updateClassifier(Instance x) throws Exception { for(int i = 0; i < m_NumIterations; i++) { // Oza-Bag style int k = poisson(1.0, random); if (m_BagSizePercent == 100) { // Train on all instances k = 1; } if (k > 0) { // Train on this instance only if k > 0 Instance x_weighted = (Instance) x.copy(); x_weighted.setWeight(x.weight() * (double)k); ((UpdateableClassifier)m_Classifiers[i]).updateClassifier(x_weighted); } } }
@Override public void updateClassifier(Instance x) throws Exception { for(int i = 0; i < m_NumIterations; i++) { // Oza-Bag style int k = poisson(1.0, random); if (m_BagSizePercent == 100) { // Train on all instances k = 1; } if (k > 0) { // Train on this instance only if k > 0 Instance x_weighted = (Instance) x.copy(); x_weighted.setWeight(x.weight() * (double)k); ((UpdateableClassifier)m_Classifiers[i]).updateClassifier(x_weighted); } } }
@Override public void updateClassifier(Instance x) throws Exception { for(int i = 0; i < m_NumIterations; i++) { // Oza-Bag style int k = poisson(1.0, random); if (m_BagSizePercent == 100) { // Train on all instances k = 1; } if (k > 0) { // Train on this instance only if k > 0 Instance x_weighted = (Instance) x.copy(); x_weighted.setWeight(x.weight() * (double)k); ((UpdateableClassifier)m_Classifiers[i]).updateClassifier(x_weighted); } } }
/**
 * Builds the bagged ensemble: each member trains on a bootstrap sample of
 * the training set, represented compactly by weighting each drawn instance
 * with its draw count.
 */
@Override
public void buildClassifier(Instances train) throws Exception {
	testCapabilities(train);
	if (getDebug()) {
		System.out.print("-: Models: ");
	}
	// Work on a copy so the caller's dataset and weights are not disturbed.
	train = new Instances(train);
	m_Classifiers = ProblemTransformationMethod.makeCopies(
		(MultiLabelClassifier) m_Classifier, m_NumIterations);
	for (int i = 0; i < m_NumIterations; i++) {
		Random r = new Random(m_Seed + i);
		Instances bag = new Instances(train, 0);
		if (m_Classifiers[i] instanceof Randomizable) {
			((Randomizable) m_Classifiers[i]).setSeed(m_Seed + i);
		}
		if (getDebug()) {
			System.out.print("" + i + " ");
		}
		// Bootstrap: count how many times each index is drawn with replacement...
		int counts[] = new int[train.numInstances()];
		for (int n = 0; n < counts.length; n++) {
			counts[r.nextInt(counts.length)]++;
		}
		// ...then add each selected instance once, weighted by its draw count.
		for (int j = 0; j < counts.length; j++) {
			if (counts[j] > 0) {
				Instance picked = train.instance(j);
				picked.setWeight(counts[j]);
				bag.add(picked);
			}
		}
		m_Classifiers[i].buildClassifier(bag);
	}
	if (getDebug()) {
		System.out.println(":-");
	}
}
/**
 * Trains the ensemble on bootstrap replicates of the training data; each
 * replicate stores a sampled instance once with weight equal to the number
 * of times it was drawn.
 */
@Override
public void buildClassifier(Instances train) throws Exception {
	testCapabilities(train);
	if (getDebug()) {
		System.out.print("-: Models: ");
	}
	train = new Instances(train); // private copy: sampling mutates weights
	m_Classifiers = ProblemTransformationMethod.makeCopies(
		(MultiLabelClassifier) m_Classifier, m_NumIterations);
	for (int i = 0; i < m_NumIterations; i++) {
		Random rng = new Random(m_Seed + i);
		Instances bootstrap = new Instances(train, 0);
		if (m_Classifiers[i] instanceof Randomizable) {
			((Randomizable) m_Classifiers[i]).setSeed(m_Seed + i);
		}
		if (getDebug()) {
			System.out.print("" + i + " ");
		}
		// Sample numInstances indices with replacement, recording multiplicity.
		int draws[] = new int[train.numInstances()];
		for (int d = 0; d < draws.length; d++) {
			draws[rng.nextInt(draws.length)]++;
		}
		for (int j = 0; j < draws.length; j++) {
			if (draws[j] > 0) {
				Instance chosen = train.instance(j);
				chosen.setWeight(draws[j]);
				bootstrap.add(chosen);
			}
		}
		m_Classifiers[i].buildClassifier(bootstrap);
	}
	if (getDebug()) {
		System.out.println(":-");
	}
}
@Override public void buildClassifier(Instances train) throws Exception { testCapabilities(train); if (getDebug()) System.out.print("-: Models: "); train = new Instances(train); //m_Classifiers = (MultilabelClassifier[]) AbstractClassifier.makeCopies(m_Classifier, m_NumIterations); m_Classifiers = MultilabelClassifier.makeCopies((MultilabelClassifier)m_Classifier, m_NumIterations); for(int i = 0; i < m_NumIterations; i++) { Random r = new Random(m_Seed+i); Instances bag = new Instances(train,0); if (m_Classifiers[i] instanceof Randomizable) ((Randomizable)m_Classifiers[i]).setSeed(m_Seed+i); if(getDebug()) System.out.print(""+i+" "); int ixs[] = new int[train.numInstances()]; for(int j = 0; j < ixs.length; j++) { ixs[r.nextInt(ixs.length)]++; } for(int j = 0; j < ixs.length; j++) { if (ixs[j] > 0) { Instance instance = train.instance(j); instance.setWeight(ixs[j]); bag.add(instance); } } m_Classifiers[i].buildClassifier(bag); } if (getDebug()) System.out.println(":-"); }
/**
 * LeaveOneOutCV returns the accuracy calculated using Leave One Out cross
 * validation. The dataset used is m_Instances associated with the Bayes
 * Network.
 *
 * @param bayesNet : Bayes Network containing structure to evaluate
 * @return accuracy (in interval 0..1) measured using leave one out cv.
 * @throws Exception passed on by updateClassifier
 */
public double leaveOneOutCV(BayesNet bayesNet) throws Exception {
	m_BayesNet = bayesNet;
	double fAccuracy = 0.0;
	double fWeight = 0.0;
	Instances instances = bayesNet.m_Instances;
	// Train once on the full dataset; individual instances are then
	// temporarily "unlearned" below instead of retraining N times.
	bayesNet.estimateCPTs();
	for (int iInstance = 0; iInstance < instances.numInstances(); iInstance++) {
		Instance instance = instances.instance(iInstance);
		// Negate the weight and update: presumably this subtracts the
		// instance's contribution from the CPTs, yielding a model trained
		// on all other instances — TODO confirm updateClassifier supports
		// negative-weight removal.
		instance.setWeight(-instance.weight());
		bayesNet.updateClassifier(instance);
		fAccuracy += accuracyIncrease(instance);
		// NOTE(review): the weight is still negated at this point, so
		// fWeight accumulates negated values; accuracyIncrease presumably
		// scales by the same (negated) weight so the final ratio is
		// positive — verify against accuracyIncrease's implementation.
		fWeight += instance.weight();
		// Restore the original weight and add the instance back into the CPTs.
		instance.setWeight(-instance.weight());
		bayesNet.updateClassifier(instance);
	}
	return fAccuracy / fWeight;
} // LeaveOneOutCV
/**
 * Sets the weight of the instance at the given row, optionally recording an
 * undo point and notifying table listeners. The weight column is revealed
 * the first time a non-default (non-1.0) weight is assigned.
 *
 * @param index  the row of the instance to modify
 * @param weight the new instance weight
 * @param notify whether to fire table-model events
 */
public void setInstanceWeight(int index, double weight, boolean notify) {
	if (!m_IgnoreChanges) {
		addUndoPoint();
	}
	m_Data.instance(index).setWeight(weight);
	if (!notify) {
		return;
	}
	if (m_ShowInstanceWeights) {
		// Weight column already visible: refresh just the affected row.
		notifyListener(new TableModelEvent(this, index, 1));
	} else if (weight != 1.0) {
		// First non-default weight: reveal the column via a header change.
		m_ShowInstanceWeights = true;
		notifyListener(new TableModelEvent(this, TableModelEvent.HEADER_ROW));
	}
}
/**
 * Updates the weight of one instance, optionally adding an undo point and
 * firing the appropriate table-model event. Assigning the first weight
 * different from 1.0 switches the weight column on.
 *
 * @param index  the row of the instance to modify
 * @param weight the new instance weight
 * @param notify whether to fire table-model events
 */
public void setInstanceWeight(int index, double weight, boolean notify) {
	if (!m_IgnoreChanges) {
		addUndoPoint();
	}
	m_Data.instance(index).setWeight(weight);
	if (notify) {
		if (m_ShowInstanceWeights) {
			// Column visible: a row-level update is sufficient.
			notifyListener(new TableModelEvent(this, index, 1));
		} else if (weight != 1.0) {
			// Make the weight column visible and announce the structure change.
			m_ShowInstanceWeights = true;
			notifyListener(new TableModelEvent(this, TableModelEvent.HEADER_ROW));
		}
	}
}
/** * Convert a single instance over. The converted instance is added to the end * of the output queue. * * @param instance the instance to convert * @throws Exception if something goes wrong */ protected void convertInstance(Instance instance) throws Exception { // Make copy and set weight to one Instance cp = (Instance) instance.copy(); cp.setWeight(1.0); // Set up values double[] instanceVals = new double[outputFormatPeek().numAttributes()]; double[] vals = m_partitionGenerator.getMembershipValues(cp); System.arraycopy(vals, 0, instanceVals, 0, vals.length); if (instance.classIndex() >= 0) { instanceVals[instanceVals.length - 1] = instance.classValue(); } push(new SparseInstance(instance.weight(), instanceVals)); }
/** * Convert a single instance over. The converted instance is added to the end * of the output queue. * * @param instance the instance to convert * @throws Exception if something goes wrong */ protected void convertInstance(Instance instance) throws Exception { // Make copy and set weight to one Instance cp = (Instance) instance.copy(); cp.setWeight(1.0); // Set up values double[] instanceVals = new double[outputFormatPeek().numAttributes()]; double[] vals = m_partitionGenerator.getMembershipValues(cp); System.arraycopy(vals, 0, instanceVals, 0, vals.length); if (instance.classIndex() >= 0) { instanceVals[instanceVals.length - 1] = instance.classValue(); } push(new SparseInstance(instance.weight(), instanceVals)); }