/**
 * Main method for testing this class from the command line.
 *
 * @param args should contain the following arguments:
 * -t training file [-T test file] [-c class index]
 */
public static void main(String[] args) {
  runClassifier(new AdditiveRegression(), args);
}
}
/** * Method used to build the classifier. */ public void buildClassifier(Instances data) throws Exception { // Initialize classifier initializeClassifier(data); // For the given number of iterations while (next()) {}; // Clean up done(); }
/**
 * Gets the current settings of the Classifier.
 *
 * @return an array of strings suitable for passing to setOptions
 */
public String[] getOptions() {
  Vector<String> result = new Vector<String>();

  // Shrinkage (learning-rate) setting
  result.add("-S");
  result.add("" + getShrinkage());

  // Flag options are only emitted when enabled
  if (getMinimizeAbsoluteError()) {
    result.add("-A");
  }
  if (getResume()) {
    result.add("-resume");
  }

  // Append the superclass options last
  Collections.addAll(result, super.getOptions());

  return result.toArray(new String[0]);
}
/**
 * Gets the current settings of the Classifier.
 *
 * @return an array of strings suitable for passing to setOptions
 */
public String[] getOptions() {
  Vector<String> opts = new Vector<String>();

  // Shrinkage (learning-rate) setting
  opts.add("-S");
  opts.add("" + getShrinkage());

  // Flag option emitted only when enabled
  if (getMinimizeAbsoluteError()) {
    opts.add("-A");
  }

  // Superclass options go at the end
  Collections.addAll(opts, super.getOptions());

  return opts.toArray(new String[0]);
}
getCapabilities().testWithFail(data); if (getMinimizeAbsoluteError()) { m_InitialPrediction = m_Data.kthSmallestValue(m_Data.classIndex(), m_Data.numInstances() / 2); } else { m_Data = residualReplace(m_Data, m_InitialPrediction); m_Diff = Double.MAX_VALUE; for (int i = 0; i < m_Data.numInstances(); i++) { if (getMinimizeAbsoluteError()) { m_Error += m_Data.instance(i).weight() * Math.abs(m_Data.instance(i).classValue()); } else { if (getMinimizeAbsoluteError()) { System.err.println("Sum of absolute residuals (predicting the median) : " + m_Error); } else {
/**
 * Creates a default AdditiveRegression instance.
 *
 * @return a freshly constructed AdditiveRegression with default settings
 */
public Classifier getClassifier() {
  Classifier defaultScheme = new AdditiveRegression();
  return defaultScheme;
}
m_Classifiers.get(m_Classifiers.size() - 1).buildClassifier(m_Data); m_Data = residualReplace(m_Data, m_Classifiers.get(m_Classifiers.size() - 1)); double sum = 0; for (int i = 0; i < m_Data.numInstances(); i++) { if (getMinimizeAbsoluteError()) { sum += m_Data.instance(i).weight() * Math.abs(m_Data.instance(i).classValue()); } else { if (getMinimizeAbsoluteError()) { System.err.println("Sum of absolute residuals: " + sum); } else {
/** * Classify an instance. * * @param inst the instance to predict * @return a prediction for the instance * @throws Exception if an error occurs */ public double classifyInstance(Instance inst) throws Exception { double prediction = m_InitialPrediction; // default model? if (!m_SuitableData) { return prediction; } for (Classifier classifier : m_Classifiers) { double toAdd = classifier.classifyInstance(inst); if (Utils.isMissingValue(toAdd)) { throw new UnassignedClassException("AdditiveRegression: base learner predicted missing value."); } prediction += (toAdd * getShrinkage()); } return prediction; }
/**
 * Returns the value of the named measure.
 *
 * @param additionalMeasureName the name of the measure to query for its value
 * @return the value of the named measure
 * @throws IllegalArgumentException if the named measure is not supported
 */
public double getMeasure(String additionalMeasureName) {
  // equalsIgnoreCase is the idiomatic equality test;
  // compareToIgnoreCase(...) == 0 is meant for ordering comparisons.
  if (additionalMeasureName.equalsIgnoreCase("measureNumIterations")) {
    return measureNumIterations();
  } else {
    throw new IllegalArgumentException(additionalMeasureName
      + " not supported (AdditiveRegression)");
  }
}
/**
 * Returns a string describing this attribute evaluator.
 *
 * @return a description of the evaluator suitable for
 * displaying in the explorer/experimenter gui
 */
public String globalInfo() {
  // Static description text, followed by the technical reference
  String description =
    " Meta classifier that enhances the performance of a regression "
      + "base classifier. Each iteration fits a model to the residuals left "
      + "by the classifier on the previous iteration. Prediction is "
      + "accomplished by adding the predictions of each classifier. "
      + "Reducing the shrinkage (learning rate) parameter helps prevent "
      + "overfitting and has a smoothing effect but increases the learning "
      + "time.\n\n"
      + "For more information see:\n\n";
  return description + getTechnicalInformation().toString();
}
+ getClassifier().getClass().getName() + "\n\n"); text.append("" + m_Classifiers.size() + " models generated.\n");
getCapabilities().testWithFail(data); if (getMinimizeAbsoluteError()) { m_InitialPrediction = m_Data .kthSmallestValue(m_Data.classIndex(), m_Data.numInstances() / 2); m_Data = residualReplace(m_Data, m_InitialPrediction); m_Diff = Double.MAX_VALUE; for (int i = 0; i < m_Data.numInstances(); i++) { if (getMinimizeAbsoluteError()) { m_Error += m_Data.instance(i).weight() * Math.abs(m_Data.instance(i).classValue()); } else { if (getMinimizeAbsoluteError()) { System.err.println( "Sum of absolute residuals (predicting the median) : " + m_Error);
/**
 * Creates a default AdditiveRegression instance.
 *
 * @return a new AdditiveRegression configured with default settings
 */
public Classifier getClassifier() {
  Classifier scheme = new AdditiveRegression();
  return scheme;
}
m_Classifiers.get(m_Classifiers.size() - 1).buildClassifier(m_Data); m_Data = residualReplace(m_Data, m_Classifiers.get(m_Classifiers.size() - 1)); double sum = 0; for (int i = 0; i < m_Data.numInstances(); i++) { if (getMinimizeAbsoluteError()) { sum += m_Data.instance(i).weight() * Math.abs(m_Data.instance(i).classValue()); } else { if (getMinimizeAbsoluteError()) { System.err.println("Sum of absolute residuals: " + sum); } else {
/** * Classify an instance. * * @param inst the instance to predict * @return a prediction for the instance * @throws Exception if an error occurs */ public double classifyInstance(Instance inst) throws Exception { double prediction = m_InitialPrediction; // default model? if (!m_SuitableData) { return prediction; } for (Classifier classifier : m_Classifiers) { double toAdd = classifier.classifyInstance(inst); if (Utils.isMissingValue(toAdd)) { throw new UnassignedClassException("AdditiveRegression: base learner predicted missing value."); } prediction += (toAdd * getShrinkage()); } return prediction; }
/**
 * Returns the value of the named measure.
 *
 * @param additionalMeasureName the name of the measure to query for its value
 * @return the value of the named measure
 * @throws IllegalArgumentException if the named measure is not supported
 */
public double getMeasure(String additionalMeasureName) {
  // equalsIgnoreCase is the idiomatic equality test;
  // compareToIgnoreCase(...) == 0 is meant for ordering comparisons.
  if (additionalMeasureName.equalsIgnoreCase("measureNumIterations")) {
    return measureNumIterations();
  } else {
    throw new IllegalArgumentException(additionalMeasureName
      + " not supported (AdditiveRegression)");
  }
}
/**
 * Returns a string describing this attribute evaluator.
 *
 * @return a description of the evaluator suitable for
 * displaying in the explorer/experimenter gui
 */
public String globalInfo() {
  // Fixed description text followed by the technical citation
  String info =
    " Meta classifier that enhances the performance of a regression "
      + "base classifier. Each iteration fits a model to the residuals left "
      + "by the classifier on the previous iteration. Prediction is "
      + "accomplished by adding the predictions of each classifier. "
      + "Reducing the shrinkage (learning rate) parameter helps prevent "
      + "overfitting and has a smoothing effect but increases the learning "
      + "time.\n\n"
      + "For more information see:\n\n";
  return info + getTechnicalInformation().toString();
}
+ getClassifier().getClass().getName() + "\n\n"); text.append("" + m_Classifiers.size() + " models generated.\n");
/** * Method used to build the classifier. */ public void buildClassifier(Instances data) throws Exception { // Initialize classifier initializeClassifier(data); // For the given number of iterations while (next()) {}; // Clean up done(); }
/**
 * Main method for testing this class from the command line.
 *
 * @param args should contain the following arguments:
 * -t training file [-T test file] [-c class index]
 */
public static void main(String[] args) {
  runClassifier(new AdditiveRegression(), args);
}
}