public Classifier train (InstanceList trainingSet) { featureSelector.selectFeaturesFor (trainingSet); // TODO What about also selecting features for the validation set? this.classifier = underlyingTrainer.train (trainingSet); return classifier; }
public void evaluateInstanceList (ClassifierTrainer trainer, InstanceList instances, String description) { Classifier classifier = trainer.getClassifier(); if (classifier.getFeatureSelection() != instances.getFeatureSelection()) // TODO consider if we really want to do this... but note that the old MaxEnt did this to the testing the validation sets. //instances.setFeatureSelection(classifier.getFeatureSelection()); System.out.print (description+" accuracy=" + classifier.getAccuracy (instances)); }
// Record the wall-clock start time for trainer c; presumably elapsed time is computed later — TODO confirm.
time[c] = System.currentTimeMillis();
// Instantiate the trainer described by the c-th trainer-spec string.
ClassifierTrainer trainer = getTrainer(classifierTrainerStrings.get(c));
// ilists[2] is passed as validation data; ilists[0] is the training split (ilists[1] presumably testing — confirm against the split code).
trainer.setValidationInstances(ilists[2]);
System.out.println ("Trial " + trialIndex + " Training " + trainer + " with "+ilists[0].size()+" instances");
// Optionally hide some training labels (semi-supervised setting)...
if (unlabeledProportionOption.value > 0)
	ilists[0].hideSomeLabels(unlabeledIndices);
Classifier classifier = trainer.train (ilists[0]);
// ...and restore them afterwards so subsequent uses see the fully labeled data.
if (unlabeledProportionOption.value > 0)
	ilists[0].unhideAllLabels();
// Record the wall-clock start time for trainer c; presumably elapsed time is computed later — TODO confirm.
time[c] = System.currentTimeMillis();
// Instantiate the trainer described by the c-th trainer-spec string.
ClassifierTrainer trainer = getTrainer(classifierTrainerStrings.get(c));
// ilists[2] is passed as validation data; ilists[0] is the training split (ilists[1] presumably testing — confirm against the split code).
trainer.setValidationInstances(ilists[2]);
System.out.println ("Trial " + trialIndex + " Training " + trainer + " with "+ilists[0].size()+" instances");
// Optionally hide some training labels (semi-supervised setting)...
if (unlabeledProportionOption.value > 0)
	ilists[0].hideSomeLabels(unlabeledIndices);
Classifier classifier = trainer.train (ilists[0]);
// ...and restore them afterwards so subsequent uses see the fully labeled data.
if (unlabeledProportionOption.value > 0)
	ilists[0].unhideAllLabels();
public Classifier train (InstanceList trainingSet) { featureSelector.selectFeaturesFor (trainingSet); // TODO What about also selecting features for the validation set? this.classifier = underlyingTrainer.train (trainingSet); return classifier; }
// Record the wall-clock start time for trainer c; presumably elapsed time is computed later — TODO confirm.
time[c] = System.currentTimeMillis();
// Instantiate the trainer described by the c-th trainer-spec string.
ClassifierTrainer trainer = getTrainer(classifierTrainerStrings.get(c));
// ilists[2] is passed as validation data; ilists[0] is the training split (ilists[1] presumably testing — confirm against the split code).
trainer.setValidationInstances(ilists[2]);
System.out.println ("Trial " + trialIndex + " Training " + trainer + " with "+ilists[0].size()+" instances");
// Optionally hide some training labels (semi-supervised setting)...
if (unlabeledProportionOption.value > 0)
	ilists[0].hideSomeLabels(unlabeledIndices);
Classifier classifier = trainer.train (ilists[0]);
// ...and restore them afterwards so subsequent uses see the fully labeled data.
if (unlabeledProportionOption.value > 0)
	ilists[0].unhideAllLabels();
public void evaluateInstanceList (ClassifierTrainer trainer, InstanceList instances, String description) { Classifier classifier = trainer.getClassifier(); if (classifier.getFeatureSelection() != instances.getFeatureSelection()) // TODO consider if we really want to do this... but note that the old MaxEnt did this to the testing the validation sets. //instances.setFeatureSelection(classifier.getFeatureSelection()); System.out.print (description+" accuracy=" + classifier.getAccuracy (instances)); }
public Classifier train (InstanceList trainingSet) { featureSelector.selectFeaturesFor (trainingSet); // TODO What about also selecting features for the validation set? this.classifier = underlyingTrainer.train (trainingSet); return classifier; }
public void evaluateInstanceList (ClassifierTrainer trainer, InstanceList instances, String description) { Classifier classifier = trainer.getClassifier(); if (classifier.getFeatureSelection() != instances.getFeatureSelection()) // TODO consider if we really want to do this... but note that the old MaxEnt did this to the testing the validation sets. //instances.setFeatureSelection(classifier.getFeatureSelection()); System.out.print (description+" accuracy=" + classifier.getAccuracy (instances)); }
// Train on the instance list and cache the resulting classifier on this object.
this.classifier = trainer.train(instanceList);
// Train on the instance list and cache the resulting classifier on this object.
this.classifier = trainer.train(instanceList);
/**
 * Trains {@code numBags} classifiers, each on a bootstrap sample of the
 * training list, and combines them into a {@link BaggingClassifier} ensemble.
 *
 * @param trainingList the labeled instances to train on
 * @return the bagged ensemble classifier
 */
public BaggingClassifier train (InstanceList trainingList) {
	java.util.Random rng = new java.util.Random ();
	Classifier[] members = new Classifier[numBags];
	for (int bagIndex = 0; bagIndex < numBags; bagIndex++) {
		// Each bag is a bootstrap sample: drawn with replacement, same size as the original list.
		InstanceList sample = trainingList.sampleWithReplacement (rng, trainingList.size());
		members[bagIndex] = underlyingTrainer.newClassifierTrainer().train (sample);
	}
	this.classifier = new BaggingClassifier (trainingList.getPipe(), members);
	return classifier;
}
/**
 * Trains {@code numBags} classifiers, each on a bootstrap sample of the
 * training list, and combines them into a {@link BaggingClassifier} ensemble.
 *
 * @param trainingList the labeled instances to train on
 * @return the bagged ensemble classifier
 */
public BaggingClassifier train (InstanceList trainingList) {
	java.util.Random rng = new java.util.Random ();
	Classifier[] members = new Classifier[numBags];
	for (int bagIndex = 0; bagIndex < numBags; bagIndex++) {
		// Each bag is a bootstrap sample: drawn with replacement, same size as the original list.
		InstanceList sample = trainingList.sampleWithReplacement (rng, trainingList.size());
		members[bagIndex] = underlyingTrainer.newClassifierTrainer().train (sample);
	}
	this.classifier = new BaggingClassifier (trainingList.getPipe(), members);
	return classifier;
}
/**
 * Trains {@code numBags} classifiers, each on a bootstrap sample of the
 * training list, and combines them into a {@link BaggingClassifier} ensemble.
 *
 * @param trainingList the labeled instances to train on
 * @return the bagged ensemble classifier
 */
public BaggingClassifier train (InstanceList trainingList) {
	java.util.Random rng = new java.util.Random ();
	Classifier[] members = new Classifier[numBags];
	for (int bagIndex = 0; bagIndex < numBags; bagIndex++) {
		// Each bag is a bootstrap sample: drawn with replacement, same size as the original list.
		InstanceList sample = trainingList.sampleWithReplacement (rng, trainingList.size());
		members[bagIndex] = underlyingTrainer.newClassifierTrainer().train (sample);
	}
	this.classifier = new BaggingClassifier (trainingList.getPipe(), members);
	return classifier;
}
public static Trial testTrainSplit(InstanceList instances) { InstanceList[] instanceLists = instances.split(new Randoms(), new double[] { 0.9, 0.1, 0.0 }); // LOG.debug("{} training instance, {} testing instances", // instanceLists[0].size(), instanceLists[1].size()); @SuppressWarnings("rawtypes") ClassifierTrainer trainer = new MaxEntTrainer(); Classifier classifier = trainer.train(instanceLists[TRAINING]); return new Trial(classifier, instanceLists[TESTING]); }
public void testRandomTrained () { ClassifierTrainer[] trainers = new ClassifierTrainer[1]; //trainers[0] = new NaiveBayesTrainer(); trainers[0] = new MaxEntTrainer(); //trainers[2] = new DecisionTreeTrainer(); Alphabet fd = dictOfSize (3); String[] classNames = new String[] {"class0", "class1", "class2"}; InstanceList ilist = new InstanceList (new Randoms(1), fd, classNames, 200); InstanceList lists[] = ilist.split (new java.util.Random(2), new double[] {.5, .5}); //System.out.println ("Training set size = "+lists[0].size()); //System.out.println ("Testing set size = "+lists[1].size()); Classifier[] classifiers = new Classifier[trainers.length]; for (int i = 0; i < trainers.length; i++) classifiers[i] = trainers[i].train (lists[0]); System.out.println ("Accuracy on training set:"); for (int i = 0; i < trainers.length; i++) System.out.println (classifiers[i].getClass().getName() + ": " + new Trial (classifiers[i], lists[0]).getAccuracy()); System.out.println ("Accuracy on testing set:"); for (int i = 0; i < trainers.length; i++) System.out.println (classifiers[i].getClass().getName() + ": " + new Trial (classifiers[i], lists[1]).getAccuracy()); }
public void testRandomTrained () { ClassifierTrainer[] trainers = new ClassifierTrainer[1]; //trainers[0] = new NaiveBayesTrainer(); trainers[0] = new MaxEntTrainer(); //trainers[2] = new DecisionTreeTrainer(); Alphabet fd = dictOfSize (3); String[] classNames = new String[] {"class0", "class1", "class2"}; InstanceList ilist = new InstanceList (new Randoms(1), fd, classNames, 200); InstanceList lists[] = ilist.split (new java.util.Random(2), new double[] {.5, .5}); //System.out.println ("Training set size = "+lists[0].size()); //System.out.println ("Testing set size = "+lists[1].size()); Classifier[] classifiers = new Classifier[trainers.length]; for (int i = 0; i < trainers.length; i++) classifiers[i] = trainers[i].train (lists[0]); System.out.println ("Accuracy on training set:"); for (int i = 0; i < trainers.length; i++) System.out.println (classifiers[i].getClass().getName() + ": " + new Trial (classifiers[i], lists[0]).getAccuracy()); System.out.println ("Accuracy on testing set:"); for (int i = 0; i < trainers.length; i++) System.out.println (classifiers[i].getClass().getName() + ": " + new Trial (classifiers[i], lists[1]).getAccuracy()); }