/**
 * Builds the default classifier — a {@code GaussianProcesses} regressor —
 * and trains it on {@code trainingDataset} via the snippet weighter.
 * Note: the trained classifier is held by {@code snippetWeighter}; the local
 * reference is not retained here.
 *
 * @throws Exception if the classifier cannot be trained
 */
public void buildDefaultClassifier() throws Exception {
    Classifier classifier = new GaussianProcesses() ;
    snippetWeighter.train(classifier, trainingDataset) ;
}
/** * Gives standard deviation of the prediction at the given instance. * * @param inst the instance to get the standard deviation for * @return the standard deviation * @throws Exception if computation fails */ public double getStandardDeviation(Instance inst) throws Exception { inst = filterInstance(inst); // Build K vector (and Kappa) Vector k = new DenseVector(m_NumTrain); for (int i = 0; i < m_NumTrain; i++) { k.set(i, m_weights[i] * m_actualKernel.eval(-1, i, inst)); } return computeStdDev(inst, k) / m_Alin; }
/**
 * Returns an enumeration describing the available options.
 *
 * @return an enumeration of all the available options.
 */
@Override
public Enumeration<Option> listOptions() {

    java.util.Vector<Option> options = new java.util.Vector<Option>();

    // Classifier-specific options, in the same order as before.
    options.addElement(new Option(
        "\tLevel of Gaussian Noise wrt transformed target." + " (default 1)",
        "L", 1, "-L <double>"));
    options.addElement(new Option(
        "\tWhether to 0=normalize/1=standardize/2=neither. "
            + "(default 0=normalize)", "N", 1, "-N"));
    options.addElement(new Option("\tThe Kernel to use.\n"
        + "\t(default: weka.classifiers.functions.supportVector.PolyKernel)",
        "K", 1, "-K <classname and parameters>"));

    // Inherited options.
    options.addAll(Collections.list(super.listOptions()));

    // Kernel-specific options, introduced by a header pseudo-option.
    options.addElement(new Option("", "", 0, "\nOptions specific to kernel "
        + getKernel().getClass().getName() + ":"));
    options.addAll(
        Collections.list(((OptionHandler) getKernel()).listOptions()));

    return options.elements();
}
/**
 * Main method for testing this class from the command line; delegates to
 * the standard Weka classifier runner.
 *
 * @param argv the commandline parameters
 */
public static void main(String[] argv) {
    runClassifier(new GaussianProcesses(), argv);
}
}
setNoise(Double.parseDouble(tmpStr)); } else { setNoise(1); setFilterType(new SelectedTag(Integer.parseInt(tmpStr), TAGS_FILTER)); } else { setFilterType(new SelectedTag(FILTER_NORMALIZE, TAGS_FILTER)); tmpStr = tmpOptions[0]; tmpOptions[0] = ""; setKernel(Kernel.forName(tmpStr, tmpOptions));
/**
 * Gets the current settings of the classifier.
 *
 * @return an array of strings suitable for passing to setOptions
 */
@Override
public String[] getOptions() {

    java.util.Vector<String> opts = new java.util.Vector<String>();

    // Noise level.
    opts.addElement("-L");
    opts.addElement(String.valueOf(getNoise()));

    // Filter type.
    opts.addElement("-N");
    opts.addElement(String.valueOf(m_filterType));

    // Kernel class plus its own options.
    opts.addElement("-K");
    opts.addElement(m_kernel.getClass().getName() + " "
        + Utils.joinOptions(m_kernel.getOptions()));

    // Inherited options.
    Collections.addAll(opts, super.getOptions());

    return opts.toArray(new String[opts.size()]);
}
/** * Classifies a given instance. * * @param inst the instance to be classified * @return the classification * @throws Exception if instance could not be classified successfully */ @Override public double classifyInstance(Instance inst) throws Exception { // Filter instance inst = filterInstance(inst); // Build K vector Vector k = new DenseVector(m_NumTrain); for (int i = 0; i < m_NumTrain; i++) { k.set(i, m_weights[i] * m_actualKernel.eval(-1, i, inst)); } double result = (k.dot(m_t) + m_avg_target - m_Blin) / m_Alin; return result; }
getCapabilities().testWithFail(insts); if (getCapabilities().handles(Capability.NUMERIC_ATTRIBUTES)) { boolean onlyNumeric = true; if (!m_checksTurnedOff) {
/**
 * Main method for testing this class from the command line; delegates to
 * the standard Weka classifier runner.
 *
 * @param argv the commandline parameters
 */
public static void main(String[] argv) {
    runClassifier(new GaussianProcesses(), argv);
}
}
setNoise(Double.parseDouble(tmpStr)); } else { setNoise(1); setFilterType(new SelectedTag(Integer.parseInt(tmpStr), TAGS_FILTER)); } else { setFilterType(new SelectedTag(FILTER_NORMALIZE, TAGS_FILTER)); tmpStr = tmpOptions[0]; tmpOptions[0] = ""; setKernel(Kernel.forName(tmpStr, tmpOptions));
/**
 * Gets the current settings of the classifier.
 *
 * @return an array of strings suitable for passing to setOptions
 */
@Override
public String[] getOptions() {

    java.util.Vector<String> settings = new java.util.Vector<String>();

    settings.addElement("-L");
    settings.addElement(String.valueOf(getNoise()));

    settings.addElement("-N");
    settings.addElement(String.valueOf(m_filterType));

    settings.addElement("-K");
    settings.addElement(m_kernel.getClass().getName() + " "
        + Utils.joinOptions(m_kernel.getOptions()));

    // Append the options handled by the superclass.
    Collections.addAll(settings, super.getOptions());

    return settings.toArray(new String[settings.size()]);
}
/** * Classifies a given instance. * * @param inst the instance to be classified * @return the classification * @throws Exception if instance could not be classified successfully */ @Override public double classifyInstance(Instance inst) throws Exception { // Filter instance inst = filterInstance(inst); // Build K vector Vector k = new DenseVector(m_NumTrain); for (int i = 0; i < m_NumTrain; i++) { k.set(i, m_weights[i] * m_actualKernel.eval(-1, i, inst)); } double result = (k.dot(m_t) + m_avg_target - m_Blin) / m_Alin; return result; }
getCapabilities().testWithFail(insts); if (getCapabilities().handles(Capability.NUMERIC_ATTRIBUTES)) { boolean onlyNumeric = true; if (!m_checksTurnedOff) {
/**
 * Builds the default classifier — a {@code GaussianProcesses} regressor —
 * and trains it on {@code trainingDataset} via the relatedness measurer.
 * Note: the trained classifier is held by {@code relatednessMeasurer}; the
 * local reference is not retained here.
 *
 * @throws Exception if the classifier cannot be trained
 */
public void buildDefaultClassifier() throws Exception {
    Classifier classifier = new GaussianProcesses() ;
    relatednessMeasurer.train(classifier, trainingDataset) ;
}
/** * Gives standard deviation of the prediction at the given instance. * * @param inst the instance to get the standard deviation for * @return the standard deviation * @throws Exception if computation fails */ public double getStandardDeviation(Instance inst) throws Exception { inst = filterInstance(inst); // Build K vector (and Kappa) Vector k = new DenseVector(m_NumTrain); for (int i = 0; i < m_NumTrain; i++) { k.set(i, m_weights[i] * m_actualKernel.eval(-1, i, inst)); } return computeStdDev(inst, k) / m_Alin; }
/**
 * Returns an enumeration describing the available options.
 *
 * @return an enumeration of all the available options.
 */
@Override
public Enumeration<Option> listOptions() {

    java.util.Vector<Option> allOptions = new java.util.Vector<Option>();

    // This classifier's own options first.
    allOptions.addElement(new Option(
        "\tLevel of Gaussian Noise wrt transformed target." + " (default 1)",
        "L", 1, "-L <double>"));
    allOptions.addElement(new Option(
        "\tWhether to 0=normalize/1=standardize/2=neither. "
            + "(default 0=normalize)", "N", 1, "-N"));
    allOptions.addElement(new Option("\tThe Kernel to use.\n"
        + "\t(default: weka.classifiers.functions.supportVector.PolyKernel)",
        "K", 1, "-K <classname and parameters>"));

    // Then the inherited options.
    allOptions.addAll(Collections.list(super.listOptions()));

    // Finally a header line followed by the current kernel's options.
    allOptions.addElement(new Option("", "", 0,
        "\nOptions specific to kernel " + getKernel().getClass().getName()
            + ":"));
    allOptions.addAll(
        Collections.list(((OptionHandler) getKernel()).listOptions()));

    return allOptions.elements();
}
/**
 * Creates a default {@code GaussianProcesses} classifier instance.
 *
 * @return a freshly constructed GaussianProcesses classifier with default
 *         settings
 */
public Classifier getClassifier() {
    return new GaussianProcesses();
}
/** * Returns natural logarithm of density estimate for given value based on * given instance. * * @param inst the instance to make the prediction for. * @param value the value to make the prediction for. * @return the natural logarithm of the density estimate * @exception Exception if the density cannot be computed */ @Override public double logDensity(Instance inst, double value) throws Exception { inst = filterInstance(inst); // Build K vector (and Kappa) Vector k = new DenseVector(m_NumTrain); for (int i = 0; i < m_NumTrain; i++) { k.set(i, m_weights[i] * m_actualKernel.eval(-1, i, inst)); } double estimate = k.dot(m_t) + m_avg_target; double sigma = computeStdDev(inst, k); // transform to GP space value = value * m_Alin + m_Blin; // center around estimate value = value - estimate; double z = -Math.log(sigma * Math.sqrt(2 * Math.PI)) - value * value / (2.0 * sigma * sigma); return z + Math.log(m_Alin); }
/** * Returns default capabilities of the classifier. * * @return the capabilities of this classifier */ @Override public Capabilities getCapabilities() { Capabilities result = getKernel().getCapabilities(); result.setOwner(this); // attribute result.enableAllAttributeDependencies(); // with NominalToBinary we can also handle nominal attributes, but only // if the kernel can handle numeric attributes if (result.handles(Capability.NUMERIC_ATTRIBUTES)) { result.enable(Capability.NOMINAL_ATTRIBUTES); } result.enable(Capability.MISSING_VALUES); // class result.disableAllClasses(); result.disableAllClassDependencies(); result.disable(Capability.NO_CLASS); result.enable(Capability.NUMERIC_CLASS); result.enable(Capability.DATE_CLASS); result.enable(Capability.MISSING_CLASS_VALUES); return result; }
/**
 * Creates a default {@code GaussianProcesses} classifier instance.
 *
 * @return a freshly constructed GaussianProcesses classifier with default
 *         settings
 */
public Classifier getClassifier() {
    return new GaussianProcesses();
}