/**
 * Main method for testing this class from the command line.
 *
 * @param argv the command-line options
 */
public static void main(String[] argv) {
  runClassifier(new LogitBoost(), argv);
}
}
/** * Method used to build the classifier. */ public void buildClassifier(Instances data) throws Exception { reset(); // Initialize classifier initializeClassifier(data); // For the given number of iterations while (next()) { } // Clean up done(); }
/** * Method used to build the classifier. */ public void buildClassifier(Instances data) throws Exception { // Initialize classifier initializeClassifier(data); // For the given number of iterations while (next()) { } // Clean up done(); }
/**
 * Gets the current settings of the Classifier.
 *
 * @return an array of strings suitable for passing to setOptions
 */
public String[] getOptions() {
  Vector<String> result = new Vector<String>();

  // Resampling (-Q) and the weight threshold (-P) are mutually exclusive
  if (getUseResampling()) {
    result.add("-Q");
  } else {
    result.add("-P");
    result.add("" + getWeightThreshold());
  }

  if (getUseEstimatedPriors()) {
    result.add("-use-estimated-priors");
  }

  // Numeric tuning parameters, each emitted as a flag/value pair
  result.add("-L");
  result.add("" + getLikelihoodThreshold());
  result.add("-H");
  result.add("" + getShrinkage());
  result.add("-Z");
  result.add("" + getZMax());
  result.add("-O");
  result.add("" + getPoolSize());
  result.add("-E");
  result.add("" + getNumThreads());

  // Append the superclass options last
  Collections.addAll(result, super.getOptions());

  return result.toArray(new String[0]);
}
/**
 * Tests the MarginCurve generation from the command line. The classifier is
 * currently hardcoded. Pipe in an arff file.
 *
 * @param args currently ignored
 */
public static void main(String[] args) {
  try {
    Utils.SMALL = 0;

    // Read the data set from standard input; class is the last attribute
    Instances train = new Instances(new java.io.InputStreamReader(System.in));
    train.setClassIndex(train.numAttributes() - 1);

    // Hardcoded boosting scheme used to generate the predictions
    weka.classifiers.meta.LogitBoost scheme =
      new weka.classifiers.meta.LogitBoost();
    scheme.setNumIterations(20);

    // Train and test on the same data to obtain the predictions
    EvaluationUtils evalUtils = new EvaluationUtils();
    ArrayList<Prediction> preds =
      evalUtils.getTrainTestPredictions(scheme, train, train);

    // Generate the margin curve and print it
    MarginCurve curve = new MarginCurve();
    System.out.println(curve.getCurve(preds));
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
}
/**
 * Constructor that sets default base learner.
 */
public TLC() {
  // LogitBoost is the default base learner for this scheme
  m_Classifier = new LogitBoost();
}
// Append the model's initial per-class scores (Fs) to the textual output,
// but only when priors were estimated from the data (otherwise m_InitialFs
// carries no information worth printing).
// NOTE(review): this fragment appears truncated — the for-loop and the
// enclosing if are not closed within this view.
if ((m_InitialFs != null) && getUseEstimatedPriors()) {
  text.append("Initial Fs: \n");
  // One line per class, labelled with the class attribute's name and value
  for (int j = 0; j < m_NumClasses; j++) {
    text.append("\n\tClass " + (j + 1) + " ("
      + m_ClassAttribute.name() + "=" + m_ClassAttribute.value(j) + "): "
      + Utils.doubleToString(m_InitialFs[j], getNumDecimalPlaces()) + "\n");
// Verify this classifier can handle the supplied data
getCapabilities().testWithFail(data);

// FIX: the computed negative log-likelihood was discarded, so the debug
// output below (and any later use of m_logLikelihood) saw a stale value.
// Store the result, as the sibling implementation in this source does.
m_logLikelihood =
  negativeLogLikelihood(m_trainYs, m_probs, m_data, m_sumOfWeights);

if (m_Debug) {
  System.err.println("Avg. negative log-likelihood: " + m_logLikelihood);
/**
 * Returns a string describing classifier
 *
 * @return a description suitable for displaying in the explorer/experimenter
 *         gui
 */
public String globalInfo() {
  // Static description, followed by the bibliographic reference
  String description = "Class for performing additive logistic regression. \n"
    + "This class performs classification using a regression scheme as the "
    + "base learner, and can handle multi-class problems. For more "
    + "information, see\n\n";
  return description + getTechnicalInformation().toString();
}
// Emit the current option settings as flag/value pairs.
// Resampling (-Q) and the weight threshold (-P) are mutually exclusive.
if (getUseResampling()) {
  options.add("-Q");
} else {
  options.add("-P");
  options.add("" + getWeightThreshold());
  // NOTE(review): in the sibling getOptions() in this source the else-branch
  // closes before the estimated-priors check; this fragment nests everything
  // below inside it — confirm brace placement against the full file (the
  // fragment is truncated and no closing braces are visible).
  if (getUseEstimatedPriors()) {
    options.add("-use-estimated-priors");
    // NOTE(review): the "-L" flag preceding the likelihood-threshold value
    // appears to be missing here (compare the sibling getOptions()).
    options.add("" + getLikelihoodThreshold());
    options.add("-H");
    options.add("" + getShrinkage());
    options.add("-Z");
    options.add("" + getZMax());
    // NOTE(review): the "-O" flag before the pool-size value appears missing.
    options.add("" + getPoolSize());
    // NOTE(review): the "-E" flag before the thread-count value appears
    // missing.
    options.add("" + getNumThreads());
    if (getResume()) {
      options.add("-resume");
/**
 * Tests the MarginCurve generation from the command line. The classifier is
 * currently hardcoded. Pipe in an arff file.
 *
 * @param args currently ignored
 */
public static void main(String[] args) {
  try {
    Utils.SMALL = 0;

    // Load instances from standard input; last attribute is the class
    Instances dataset = new Instances(new java.io.InputStreamReader(System.in));
    dataset.setClassIndex(dataset.numAttributes() - 1);

    // Hardcoded LogitBoost classifier with 20 boosting iterations
    weka.classifiers.meta.LogitBoost booster =
      new weka.classifiers.meta.LogitBoost();
    booster.setNumIterations(20);

    // Obtain predictions by training and testing on the same data
    EvaluationUtils utils = new EvaluationUtils();
    ArrayList<Prediction> predictionList =
      utils.getTrainTestPredictions(booster, dataset, dataset);

    // Build the margin curve from the predictions and print it
    MarginCurve marginCurve = new MarginCurve();
    Instances curveData = marginCurve.getCurve(predictionList);
    System.out.println(curveData);
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
}
/** Creates a default LogitBoost */
public Classifier getClassifier() {
  // LogitBoost is the default scheme for this harness
  return new LogitBoost();
}
// Print the additive model's initial per-class scores (Fs) when class
// priors were estimated from the training data.
// NOTE(review): fragment is truncated — the loop and enclosing if are not
// closed within this view.
if ((m_InitialFs != null) && getUseEstimatedPriors()) {
  text.append("Initial Fs: \n");
  // One entry per class: "Class <n> (<attribute>=<value>): <score>"
  for (int j = 0; j < m_NumClasses; j++) {
    text.append("\n\tClass " + (j + 1) + " ("
      + m_ClassAttribute.name() + "=" + m_ClassAttribute.value(j) + "): "
      + Utils.doubleToString(m_InitialFs[j], getNumDecimalPlaces()) + "\n");
// Verify this classifier can handle the supplied data
getCapabilities().testWithFail(data);

// Record the negative log-likelihood of the current model so the debug
// output below reports an up-to-date value
m_logLikelihood =
  negativeLogLikelihood(m_trainYs, m_probs, m_data, m_sumOfWeights);

// NOTE(review): fragment is truncated — the if-block is not closed here
if (m_Debug) {
  System.err.println("Avg. negative log-likelihood: " + m_logLikelihood);
/**
 * Returns a string describing classifier
 *
 * @return a description suitable for displaying in the explorer/experimenter
 *         gui
 */
public String globalInfo() {
  return "Class for performing additive logistic regression. \n"
    + "This class performs classification using a regression scheme as the "
    + "base learner, and can handle multi-class problems. For more "
    + "information, see\n\n"
    + getTechnicalInformation().toString();
}
/**
 * Main method for testing this class.
 *
 * @param argv the options
 */
public static void main(String[] argv) {
  // Delegate to the standard command-line runner with a fresh instance
  runClassifier(new LogitBoost(), argv);
}
}
/** Creates a default LogitBoost */
public Classifier getClassifier() {
  // Fresh default-configured LogitBoost instance
  return new LogitBoost();
}