/**
 * Creates the default neighbour-search implementation.
 *
 * @return a newly constructed {@link LinearNNSearch}
 */
public NearestNeighbourSearch getNearestNeighbourSearch() {
    NearestNeighbourSearch defaultSearch = new LinearNNSearch();
    return defaultSearch;
}
/**
 * Returns the nearest instance in the current neighbourhood to the supplied
 * instance.
 *
 * @param target The instance to find the nearest neighbour for.
 * @return the nearest instance
 * @throws Exception if the nearest neighbour could not be found.
 */
public Instance nearestNeighbour(Instance target) throws Exception {
    // Ask for a neighbourhood of size 1 and hand back its only member.
    Instances singleNeighbour = kNearestNeighbours(target, 1);
    return singleNeighbour.instance(0);
}
/**
 * Gets the current settings.
 *
 * @return an array of strings suitable for passing to setOptions()
 */
public String[] getOptions() {
    Vector<String> result = new Vector<String>();
    Collections.addAll(result, super.getOptions());
    // -S mirrors the flag consumed by setOptions(String[]).
    if (getSkipIdentical()) {
        result.add("-S");
    }
    // new String[0] is the preferred toArray idiom (JVM sizes the array itself).
    return result.toArray(new String[0]);
}
protected void buildInternal(MultiLabelInstances trainSet) throws Exception { if (trainSet.getNumInstances() < numOfNeighbors) { throw new IllegalArgumentException("The number of training instances is less than the number of requested nearest neighbours"); } train = new Instances(trainSet.getDataSet()); // label attributes don't influence distance estimation String labelIndicesString = ""; for (int i = 0; i < numLabels - 1; i++) { labelIndicesString += (labelIndices[i] + 1) + ","; } labelIndicesString += (labelIndices[numLabels - 1] + 1); dfunc.setAttributeIndices(labelIndicesString); dfunc.setInvertSelection(true); lnn = new LinearNNSearch(); lnn.setDistanceFunction(dfunc); lnn.setInstances(train); lnn.setMeasurePerformance(false); }
// NOTE(review): fragment of a larger method — the loop index i and the fields
// dfunc, lnn, train, numOfNeighbors are declared outside this view.
// Invert the attribute selection so the label attributes set on dfunc are
// excluded from distance computation.
dfunc.setInvertSelection(true);
// Fresh linear search over the training data; skipIdentical=true drops
// zero-distance matches so an instance is not its own neighbour.
lnn = new LinearNNSearch();
lnn.setSkipIdentical(true);
lnn.setDistanceFunction(dfunc);
lnn.setInstances(train);
lnn.setMeasurePerformance(false);
// Defensive copy of the k nearest neighbours of training instance i.
Instances knn = new Instances(lnn.kNearestNeighbours(train.instance(i), numOfNeighbors));
/** * weka Ibk style prediction * * @throws Exception if nearest neighbours search fails */ protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception { Instances knn = lnn.kNearestNeighbours(instance, numOfNeighbors); double[] distances = lnn.getDistances(); double[] confidences = getConfidences(knn, distances); boolean[] bipartition; MultiLabelOutput results = null; switch (extension) { case NONE: // BRknn results = new MultiLabelOutput(confidences, 0.5); break; case EXTA: // BRknn-a bipartition = labelsFromConfidences2(confidences); results = new MultiLabelOutput(bipartition, confidences); break; case EXTB: // BRknn-b bipartition = labelsFromConfidences3(confidences); results = new MultiLabelOutput(bipartition, confidences); break; } return results; }
/**
 * Parses a given list of options. <p/>
 * <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -S
 *  Skip identical instances (distances equal to zero).
 * </pre>
 * <!-- options-end -->
 *
 * @param options the list of options as an array of strings
 * @throws Exception if an option is not supported
 */
public void setOptions(String[] options) throws Exception {
    super.setOptions(options);
    // Consume -S before verifying nothing unrecognised remains.
    boolean skipFlag = Utils.getFlag('S', options);
    setSkipIdentical(skipFlag);
    Utils.checkForRemainingOptions(options);
}
/**
 * Adds the given instance info. This implementation updates the range
 * datastructures of the DistanceFunction class.
 *
 * @param ins The instance to add the information of. Usually this is
 *            the test instance supplied to update the range of
 *            attributes in the distance function.
 */
public void addInstanceInfo(Instance ins) {
    // Nothing to update until a data set has been supplied.
    if (m_Instances == null) {
        return;
    }
    try {
        update(ins);
    } catch (Exception ex) {
        // Best-effort update: report and continue, matching the original contract.
        ex.printStackTrace();
    }
}
// NOTE(review): fragment — neighbours, origDistances, lnn, cvMaxK and the
// enclosing method are declared outside this view. A single search fetches up
// to cvMaxK neighbours; the matching distances are read back immediately, as
// lnn stores them from the last kNearestNeighbours call.
neighbours = lnn.kNearestNeighbours(instance, cvMaxK);
origDistances = lnn.getDistances();
/**
 * Parses a given list of options. <p/>
 * <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -S
 *  Skip identical instances (distances equal to zero).
 * </pre>
 * <!-- options-end -->
 *
 * @param options the list of options as an array of strings
 * @throws Exception if an option is not supported
 */
public void setOptions(String[] options) throws Exception {
    super.setOptions(options);
    // Pull out -S, then ensure no unrecognised options are left over.
    setSkipIdentical(Utils.getFlag('S', options));
    Utils.checkForRemainingOptions(options);
}
/**
 * Adds the given instance info. This implementation updates the range
 * datastructures of the DistanceFunction class.
 *
 * @param ins The instance to add the information of. Usually this is
 *            the test instance supplied to update the range of
 *            attributes in the distance function.
 */
public void addInstanceInfo(Instance ins) {
    if (m_Instances != null) {
        try {
            update(ins);
        } catch (Exception ex) {
            // Best-effort range update; failures are reported but not propagated.
            ex.printStackTrace();
        }
    }
}
/**
 * Creates a default LinearNNSearch.
 *
 * @return a fresh brute-force linear neighbour search
 */
public NearestNeighbourSearch getNearestNeighbourSearch() {
    return new LinearNNSearch();
}
/**
 * Returns the nearest instance in the current neighbourhood to the supplied
 * instance.
 *
 * @param target The instance to find the nearest neighbour for.
 * @return the nearest instance
 * @throws Exception if the nearest neighbour could not be found.
 */
public Instance nearestNeighbour(Instance target) throws Exception {
    // The closest neighbour is simply the first (and only) member of the
    // 1-element neighbourhood.
    return kNearestNeighbours(target, 1).instance(0);
}
/**
 * Gets the current settings.
 *
 * @return an array of strings suitable for passing to setOptions()
 */
public String[] getOptions() {
    Vector<String> result = new Vector<String>();
    Collections.addAll(result, super.getOptions());
    // Emit -S only when skipping identical instances is enabled.
    if (getSkipIdentical()) {
        result.add("-S");
    }
    // new String[0] is the preferred toArray idiom (JVM sizes the array itself).
    return result.toArray(new String[0]);
}
// NOTE(review): fragment of an option-parsing method; the truncated call above
// presumably instantiates a user-specified search method via Utils.forName
// (its opening is outside this view) — confirm against the full method.
NearestNeighbourSearch.class, className, searchMethodSpec)); } else {
    // No search method supplied: fall back to brute-force linear search.
    setSearchMethod(new LinearNNSearch());
// NOTE(review): fragment — the enclosing try opens outside this view.
// Copy the k nearest neighbours of the query instance.
knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors));
} catch (Exception ex) {
    // NOTE(review): the search failure is logged and swallowed, leaving knn
    // unchanged (possibly null/stale) — verify downstream code tolerates this.
    Logger.getLogger(MLkNN.class.getName()).log(Level.SEVERE, null, ex);
// NOTE(review): fragment of an option-parsing method; the truncated expression
// above appears to build a search method from a command-line spec — the call's
// opening is not visible here, so confirm against the full method.
NearestNeighbourSearch.class, className, searchMethodSpec)); } else {
    // Default branch: no spec given, use a plain linear search.
    setSearchMethod(new LinearNNSearch());
// NOTE(review): fragment — numLabels, lnn, numOfNeighbors are fields declared
// outside this view.
// One confidence score per label; filled in by code following this fragment.
double[] confidences = new double[numLabels];
// Defensive copy of the k nearest neighbours of the query instance.
Instances knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors));
// Default to brute-force linear scan for the nearest-neighbour search.
this.setNearestNeighbourSearchAlgorithm(new LinearNNSearch());
// NOTE(review): fragment — loop index i and the fields train, lnn,
// numOfNeighbors are declared outside this view. Takes a defensive copy of
// the k nearest neighbours of training instance i.
Instances knn = new Instances(lnn.kNearestNeighbours(train.instance(i), numOfNeighbors));