/**
 * Generates the evaluation summary with an empty title and without
 * complexity statistics.
 *
 * @return a summary description of the classifier evaluation
 */
@Override
public String toSummaryString() {
  final String noTitle = "";
  final boolean noComplexityStats = false;
  return toSummaryString(noTitle, noComplexityStats);
}
/**
 * Produces the evaluation summary under the default "=== Summary ===" heading.
 *
 * @param printComplexityStatistics if true, complexity statistics are
 *          returned as well
 * @return the summary string
 */
public String toSummaryString(boolean printComplexityStatistics) {
  final String defaultTitle = "=== Summary ===\n";
  return toSummaryString(defaultTitle, printComplexityStatistics);
}
/**
 * Forwards to the wrapped evaluation object's no-argument summary
 * (no title, no complexity statistics).
 *
 * @return a summary description of the classifier evaluation
 */
@Override
public String toSummaryString() {
  return this.m_delegate.toSummaryString();
}
/**
 * Forwards to the wrapped evaluation object's summary with the default title.
 *
 * @param printComplexityStatistics if true, complexity statistics are
 *          returned as well
 * @return the summary string
 */
public String toSummaryString(boolean printComplexityStatistics) {
  return this.m_delegate.toSummaryString(printComplexityStatistics);
}
/**
 * Returns the evaluation summary, prefixed by the standard
 * "=== Summary ===" header.
 *
 * @param printComplexityStatistics if true, complexity statistics are
 *          returned as well
 * @return the summary string
 */
public String toSummaryString(boolean printComplexityStatistics) {
  // Delegate to the two-argument variant with the conventional heading.
  return toSummaryString("=== Summary ===\n", printComplexityStatistics);
}
/**
 * Returns the plain evaluation summary: no heading, and no complexity
 * statistics.
 *
 * @return a summary description of the classifier evaluation
 */
@Override
public String toSummaryString() {
  // Empty title, complexity statistics suppressed.
  return toSummaryString("", false);
}
/**
 * Delegates the no-argument summary (no title, no complexity statistics)
 * to the underlying evaluation instance.
 *
 * @return a summary description of the classifier evaluation
 */
@Override
public String toSummaryString() {
  return m_delegate.toSummaryString();
}
/**
 * Delegates the default-title summary to the underlying evaluation instance.
 *
 * @param printComplexityStatistics if true, complexity statistics are
 *          returned as well
 * @return the summary string
 */
public String toSummaryString(boolean printComplexityStatistics) {
  return m_delegate.toSummaryString(printComplexityStatistics);
}
/**
 * Outputs the performance statistics in summary form: counts (and
 * percentages) of correctly, incorrectly and unclassified instances, the
 * total number of instances classified, and the number of instances (if any)
 * without a class value. The work is delegated to the wrapped evaluation.
 *
 * @param title the title for the statistics
 * @param printComplexityStatistics if true, complexity statistics are
 *          returned as well
 * @return the summary as a String
 */
public String toSummaryString(String title, boolean printComplexityStatistics) {
  return this.m_delegate.toSummaryString(title, printComplexityStatistics);
}
/**
 * Produces the performance-statistics summary via the wrapped evaluation
 * object. The summary lists instances classified correctly, incorrectly
 * and left unclassified (with percentages), the total classified, and how
 * many instances had no class value.
 *
 * @param title the title for the statistics
 * @param printComplexityStatistics if true, complexity statistics are
 *          returned as well
 * @return the summary as a String
 */
public String toSummaryString(String title, boolean printComplexityStatistics) {
  return m_delegate.toSummaryString(title, printComplexityStatistics);
}
// Evaluate the J48 decision tree on the same data it was trained on
// (training-set evaluation) and print the summary with complexity stats.
Evaluation evaluation = new Evaluation(data);
evaluation.evaluateModel(j48DecisionTree, data);
String summary = evaluation.toSummaryString("\nResults\n======\n", true);
System.out.println(summary);
// Evaluate the multilayer perceptron on its own training set.
Evaluation trainingEval = new Evaluation(train);
trainingEval.evaluateModel(mlp, train);
// Training error rate.
System.out.println(trainingEval.errorRate());
// Full training-performance summary (includes RMSE and related stats).
System.out.println(trainingEval.toSummaryString());
//set the class index dataFiltered.setClassIndex(dataFiltered.numAttributes() - 1); //build a model -- choose a classifier as you want classifier.buildClassifier(dataFiltered); Evaluation eval = new Evaluation(dataFiltered); eval.crossValidateModel(classifier, dataFiltered, 10, new Random(1)); //print stats -- do not require to calculate confusion mtx, weka do it! System.out.println(classifier); System.out.println(eval.toSummaryString()); System.out.println(eval.toMatrixString()); System.out.println(eval.toClassDetailsString());
/**
 * Returns description of the bagged classifier.
 *
 * @return description of the bagged classifier as a string
 */
@Override
public String toString() {
  // No description is possible before the ensemble has been built.
  if (m_Classifiers == null) {
    return "Bagging: No model built yet.";
  }
  // StringBuilder: this buffer is method-local, so StringBuffer's
  // synchronization is pure overhead.
  StringBuilder text = new StringBuilder();
  text.append("Bagging with " + getNumIterations()
    + " iterations and base learner\n\n" + getClassifierSpec());
  if (getPrintClassifiers()) {
    text.append("All the base classifiers: \n\n");
    for (int i = 0; i < m_Classifiers.length; i++) {
      text.append(m_Classifiers[i].toString() + "\n\n");
    }
  }
  // Append out-of-bag error estimates when they were computed.
  if (m_CalcOutOfBag) {
    text.append(m_OutOfBagEvaluationObject.toSummaryString(
      "\n\n*** Out-of-bag estimates ***\n",
      getOutputOutOfBagComplexityStatistics()));
  }
  return text.toString();
}
/**
 * Returns description of the bagged classifier.
 *
 * @return description of the bagged classifier as a string
 */
@Override
public String toString() {
  // Nothing to describe until buildClassifier() has produced the ensemble.
  if (m_Classifiers == null) {
    return "Bagging: No model built yet.";
  }
  // Use StringBuilder instead of StringBuffer: the buffer never escapes
  // this method, so thread-safe synchronization is unnecessary.
  StringBuilder text = new StringBuilder();
  text.append("Bagging with " + getNumIterations()
    + " iterations and base learner\n\n" + getClassifierSpec());
  if (getPrintClassifiers()) {
    text.append("All the base classifiers: \n\n");
    for (int i = 0; i < m_Classifiers.length; i++) {
      text.append(m_Classifiers[i].toString() + "\n\n");
    }
  }
  // Include the out-of-bag estimates when they were calculated.
  if (m_CalcOutOfBag) {
    text.append(m_OutOfBagEvaluationObject.toSummaryString(
      "\n\n*** Out-of-bag estimates ***\n",
      getOutputOutOfBagComplexityStatistics()));
  }
  return text.toString();
}
1. filteredData = new Instances(new BufferedReader(new FileReader("/Users/Passionate/Desktop/train_std.arff"))); 2. Instances filteredTests= new Instances(new BufferedReader(new FileReader("/Users/Passionate/Desktop/test_std.arff"))); 3. filteredData.setClassIndex(filteredData.attribute("@@class@@").index()); 4. Classifier classifier=new SMO(); 5. classifier.buildClassifier(filteredData); 6. FilteredClassifier filteredClassifier=new FilteredClassifier(); 7. filteredClassifier.setClassifier(classifier); 8. Evaluation eval = new Evaluation(filteredData); 9. eval.evaluateModel(filteredClassifier, filteredTests); **// Error line.** 10. System.out.println(eval.toSummaryString("\nResults\n======\n", false));
/**
 * Trains a LibSVM classifier on the training set using parameters stored
 * with the SVM classification type, evaluates it on the test set, and
 * prints the evaluation summary.
 */
public static void classify() {
  try {
    // Load the data; the class attribute is the last one in each set.
    Instances train = new Instances (...);
    train.setClassIndex(train.numAttributes() - 1);
    Instances test = new Instances (...);
    test.setClassIndex(test.numAttributes() - 1);
    // 6 is SVM.
    ClassificationType classificationType = ClassificationTypeDAO.get(6);
    LibSVM classifier = new LibSVM();
    // Stored parameters are a single space-separated option string.
    String[] optionsArray = classificationType.getParameters().split(" ");
    classifier.setOptions(optionsArray);
    classifier.buildClassifier(train);
    // Evaluate the trained model on the held-out test set.
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(classifier, test);
    System.out.println(eval.toSummaryString("\nResults\n======\n", false));
  } catch (Exception ex) {
    Misc_Utils.printStackTrace(ex);
  }
}
evaluation = new Evaluation (train_data); evaluation.crossValidateModel(c, train_data, 10, new Random(1)); System.out.println(evaluation.toSummaryString());
/**
 * Regression test: records a fixed set of predictions against the embedded
 * dataset, then compares the resulting summary and per-class details with
 * the stored reference output (creating it if it does not exist yet).
 *
 * @throws Exception if evaluation of the recorded predictions fails
 */
public void testRegression() throws Exception {
  Instances inst = new Instances(new StringReader(DATA));
  inst.setClassIndex(inst.numAttributes() - 1);
  Evaluation eval = new Evaluation(inst);
  // Replay the canned predictions one instance at a time.
  for (int i = 0; i < inst.numInstances(); i++) {
    eval.evaluateModelOnceAndRecordPrediction(PREDS[i], inst.instance(i));
  }
  String standard = eval.toSummaryString();
  String info = eval.toClassDetailsString();
  weka.test.Regression reg = new weka.test.Regression(getClass());
  reg.println(standard);
  reg.println(info);
  try {
    String diff = reg.diff();
    if (diff == null) {
      // First run: no reference output exists, so one is created.
      System.err.println("Warning: No reference available, creating.");
    } else if (!diff.isEmpty()) {
      // Fixed typo in the failure message ("tst" -> "test").
      fail("Regression test failed. Difference:\n" + diff);
    }
  } catch (IOException ex) {
    fail("Problem during regression testing.\n" + ex);
  }
}
/**
 * Regression test: evaluates the canned predictions on the embedded
 * dataset and diffs the summary plus class-details output against the
 * stored reference, failing on any difference.
 *
 * @throws Exception if evaluating the recorded predictions fails
 */
public void testRegression() throws Exception {
  Instances inst = new Instances(new StringReader(DATA));
  inst.setClassIndex(inst.numAttributes() - 1);
  Evaluation eval = new Evaluation(inst);
  // Feed each pre-computed prediction to the evaluator in order.
  for (int i = 0; i < inst.numInstances(); i++) {
    eval.evaluateModelOnceAndRecordPrediction(PREDS[i], inst.instance(i));
  }
  String standard = eval.toSummaryString();
  String info = eval.toClassDetailsString();
  weka.test.Regression reg = new weka.test.Regression(getClass());
  reg.println(standard);
  reg.println(info);
  try {
    String diff = reg.diff();
    if (diff == null) {
      // No reference yet — this run establishes it.
      System.err.println("Warning: No reference available, creating.");
    } else if (!diff.isEmpty()) {
      // Fixed typo in the failure message ("tst" -> "test").
      fail("Regression test failed. Difference:\n" + diff);
    }
  } catch (IOException ex) {
    fail("Problem during regression testing.\n" + ex);
  }
}