// Train a NaiveBayes model on isTrainingSet (declared elsewhere), round-trip it
// through Java serialization on disk, then evaluate the reloaded model.
// NOTE(review): evaluating on the training set itself measures resubstitution
// accuracy only — presumably a demo; confirm a separate test set is intended.
Classifier cModel = (Classifier)new NaiveBayes(); // cast is redundant: NaiveBayes already is a Classifier
cModel.buildClassifier(isTrainingSet);
// Persist the trained model so it can later be reloaded without retraining.
weka.core.SerializationHelper.write("/some/where/nBayes.model", cModel);
Classifier cls = (Classifier) weka.core.SerializationHelper.read("/some/where/nBayes.model");
// Test the model
Evaluation eTest = new Evaluation(isTrainingSet);
eTest.evaluateModel(cls, isTrainingSet);
/**
 * Returns the Capabilities of this filter.
 *
 * <p>Delegates to a freshly constructed {@link NaiveBayes}, so this filter
 * accepts exactly the data that NaiveBayes itself can handle.
 *
 * @return the capabilities of this object
 * @see Capabilities
 */
@Override
public Capabilities getCapabilities() {
  NaiveBayes delegate = new NaiveBayes();
  return delegate.getCapabilities();
}
/**
 * Main method for testing this class.
 *
 * @param argv the options
 */
public static void main(String[] argv) {
  // Standard Weka entry point: parses the command-line options and runs
  // the classifier via AbstractClassifier.runClassifier.
  runClassifier(new NaiveBayes(), argv);
}
}
// Build a NaiveBayes classifier on the training set `train`
// (declared elsewhere in this file).
NaiveBayes nB = new NaiveBayes();
nB.buildClassifier(train);
/**
 * Processes the given instances.
 *
 * <p>NOTE(review): this fragment is truncated — the loop body and the
 * remainder of the method are not visible here.
 *
 * @param instances the instances to process
 * @throws Exception if processing fails
 */
@Override
protected Instances process(Instances instances) throws Exception {
  // Lazily train the NaiveBayes estimator on the first call only.
  if (m_estimator == null) {
    m_estimator = new NaiveBayes();
    // m_remove presumably strips attributes before training — confirm its setup.
    trainingData = Filter.useFilter(instances, m_remove);
    m_estimator.buildClassifier(trainingData);
    // Pull out the per-attribute, per-class conditional estimators the
    // trained model holds, plus the header describing the attributes.
    Estimator[][] estimators = m_estimator.getConditionalEstimators();
    Instances header = m_estimator.getHeader();
    int index = 0;
    for (int i = 0; i < header.numAttributes(); i++) {
public class Run { public static void main(String[] args) throws Exception { ConverterUtils.DataSource source1 = new ConverterUtils.DataSource("./data/train.arff"); Instances train = source1.getDataSet(); // setting class attribute if the data format does not provide this information // For example, the XRFF format saves the class attribute information as well if (train.classIndex() == -1) train.setClassIndex(train.numAttributes() - 1); ConverterUtils.DataSource source2 = new ConverterUtils.DataSource("./data/test.arff"); Instances test = source2.getDataSet(); // setting class attribute if the data format does not provide this information // For example, the XRFF format saves the class attribute information as well if (test.classIndex() == -1) test.setClassIndex(train.numAttributes() - 1); // model NaiveBayes naiveBayes = new NaiveBayes(); naiveBayes.buildClassifier(train); // this does the trick double label = naiveBayes.classifyInstance(test.instance(0)); test.instance(0).setClassValue(label); System.out.println(test.instance(0).stringValue(4)); } }
// Fail fast if the classifier cannot handle this data.
getCapabilities().testWithFail(instances);
// Incrementally train on each instance in turn (NaiveBayes is updateable).
// NOTE(review): fragment is truncated — the loop body continues beyond view.
while (enumInsts.hasMoreElements()) {
  Instance instance = enumInsts.nextElement();
  updateClassifier(instance);
// Add the instance to the model, score it, then negate its weight and update
// again — the negative-weight update subtracts the instance's contribution.
// NOTE(review): the distribution is computed with the instance still included
// in the model; confirm this ordering is intended (vs. leave-one-out scoring).
m_NB.updateClassifier(instance);
double[] nbDist = m_NB.distributionForInstance(instance);
instance.setWeight(-instance.weight());
m_NB.updateClassifier(instance);
// (Re)build the NaiveBayes model from scratch on the stored instances.
m_NB = new NaiveBayes();
m_NB.buildClassifier(m_theInstances);
/**
 * Processes the given instances.
 *
 * <p>NOTE(review): this fragment is truncated — the loop body and the
 * remainder of the method are not visible here. It also duplicates an
 * earlier snippet in this file; consider deduplicating.
 *
 * @param instances the instances to process
 * @throws Exception if processing fails
 */
@Override
protected Instances process(Instances instances) throws Exception {
  // Lazily train the NaiveBayes estimator on the first call only.
  if (m_estimator == null) {
    m_estimator = new NaiveBayes();
    // m_remove presumably strips attributes before training — confirm its setup.
    trainingData = Filter.useFilter(instances, m_remove);
    m_estimator.buildClassifier(trainingData);
    // Conditional estimators per attribute/class, plus the attribute header.
    Estimator[][] estimators = m_estimator.getConditionalEstimators();
    Instances header = m_estimator.getHeader();
    int index = 0;
    for (int i = 0; i < header.numAttributes(); i++) {
/**
 * Main method for testing this class.
 *
 * @param argv the options
 */
public static void main(String[] argv) {
  // Standard Weka entry point: parses the command-line options and runs
  // the classifier via AbstractClassifier.runClassifier.
  runClassifier(new NaiveBayes(), argv);
}
}
// Fail fast if the classifier cannot handle this data.
getCapabilities().testWithFail(instances);
// Incrementally train on each instance in turn (NaiveBayes is updateable).
// NOTE(review): fragment is truncated — the loop body continues beyond view.
while (enumInsts.hasMoreElements()) {
  Instance instance = enumInsts.nextElement();
  updateClassifier(instance);
// NOTE(review): this fragment appears garbled — it contains two `} else {`
// tokens with no matching `if` headers in view, so it will not compile as-is;
// it looks like two separate snippets were spliced together. Reconstruct the
// intended control flow from the original source before relying on it.
class_distribs[i][(int) inst.classValue()] -= inst.weight();
// Negate the weight so the incremental update *removes* the instance's
// contribution, then restore the original weight afterwards.
inst.setWeight(-inst.weight());
m_NB.updateClassifier(inst);
inst.setWeight(-inst.weight());
} else {
double[] nbDist = m_NB.distributionForInstance(inst);
m_NB.updateClassifier(inst);
} else {
// Presumably the numeric-class branch: accumulate the weighted class value.
class_distribs[i][0] += (inst.classValue() * inst.weight());
/**
 * Constructs the classifier with its default base model, a plain
 * {@link NaiveBayes} instance.
 */
public AggregateableFilteredClassifier() {
  this.m_Classifier = new NaiveBayes();
}
/**
 * Scores the iris data with a NaiveBayes model whose header exactly matches
 * the incoming data: no missing/mismatched attributes may be reported and
 * every instance must yield one probability per class label.
 */
@Test
public void testScoreWithClassifier() throws Exception {
  // Load the bundled iris data and mark the last attribute as the class.
  Instances trainingData = new Instances(new BufferedReader(new StringReader(
    CorrelationMatrixMapTaskTest.IRIS)));
  trainingData.setClassIndex(trainingData.numAttributes() - 1);

  NaiveBayes model = new NaiveBayes();
  model.buildClassifier(trainingData);

  WekaScoringMapTask scoringTask = new WekaScoringMapTask();
  scoringTask.setModel(model, trainingData, trainingData);

  // Header matches exactly, so nothing is missing or mismatched.
  assertEquals(0, scoringTask.getMissingMismatchAttributeInfo().length());
  // Iris has three class labels.
  assertEquals(3, scoringTask.getPredictionLabels().size());
  // Each scored instance produces one probability per class label.
  for (int row = 0; row < trainingData.numInstances(); row++) {
    assertEquals(3, scoringTask.processInstance(trainingData.instance(row)).length);
  }
}
/**
 * Returns the Capabilities of this filter.
 *
 * @return the capabilities of this object, taken from a default
 *         {@link NaiveBayes} instance
 * @see Capabilities
 */
@Override
public Capabilities getCapabilities() {
  final NaiveBayes prototype = new NaiveBayes();
  return prototype.getCapabilities();
}
/**
 * Creates a default NaiveBayes classifier.
 *
 * @return a freshly constructed {@link NaiveBayes} instance
 */
public Classifier getClassifier() {
  Classifier defaultClassifier = new NaiveBayes();
  return defaultClassifier;
}
// (Re)build the NaiveBayes model from scratch on the stored instances.
m_NB = new NaiveBayes();
m_NB.buildClassifier(m_theInstances);
// Select the base classifier implementation by its string name.
// NOTE(review): `Mode` violates lowerCamelCase naming, and it stays
// unassigned when `alg` matches neither branch — later reads will not
// compile unless another branch (not visible here) assigns it; consider a
// final else that throws IllegalArgumentException. Not renamed here because
// code outside this fragment may reference `Mode`.
Classifier Mode; // a parent class
if(alg.equals("DecisionStump")) {
  Mode = new DecisionStump();
} else if(alg.equals("NaiveBayes")) {
  Mode = new NaiveBayes();
}
/**
 * Scores data that is missing one attribute the model was trained with:
 * the task must report the missing attribute by name and still produce a
 * three-element distribution per instance.
 */
@Test
public void testScoreWithClassifierSomeMissingFields() throws Exception {
  // Train on the full iris data.
  Instances trainingData = new Instances(new BufferedReader(new StringReader(
    CorrelationMatrixMapTaskTest.IRIS)));
  trainingData.setClassIndex(trainingData.numAttributes() - 1);

  NaiveBayes model = new NaiveBayes();
  model.buildClassifier(trainingData);

  WekaScoringMapTask scoringTask = new WekaScoringMapTask();

  // Drop the first attribute (sepallength) from the incoming data so the
  // task has something to flag as missing.
  Remove removeFilter = new Remove();
  removeFilter.setAttributeIndices("1");
  removeFilter.setInputFormat(trainingData);
  Instances incoming = Filter.useFilter(trainingData, removeFilter);

  scoringTask.setModel(model, trainingData, incoming);

  assertTrue(scoringTask.getMissingMismatchAttributeInfo().length() > 0);
  assertTrue(scoringTask.getMissingMismatchAttributeInfo().equals(
    "sepallength missing from incoming data\n"));
  assertEquals(3, scoringTask.getPredictionLabels().size());
  for (int row = 0; row < incoming.numInstances(); row++) {
    assertEquals(3, scoringTask.processInstance(incoming.instance(row)).length);
  }
}