/**
 * Stack two Instances together row-wise.
 */
public static final Instances combineInstances(Instances D1, Instances D2) {
    Instances D = new Instances(D1);
    for (int i = 0; i < D2.numInstances(); i++) {
        D.add(D2.instance(i));
    }
    return D;
}
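// A minimal usage sketch (hypothetical file names): both datasets are assumed to
// share an identical header (same attributes in the same order), otherwise adding
// instances from one to the other is not meaningful.
Instances part1 = new Instances(new java.io.BufferedReader(new java.io.FileReader("part1.arff")));
Instances part2 = new Instances(new java.io.BufferedReader(new java.io.FileReader("part2.arff")));
Instances combined = combineInstances(part1, part2);
System.out.println("Combined size: " + combined.numInstances());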
JFrame1 form = new JFrame1();
form.setVisible(true);
form.addPropertyChangeListener(new PropertyChangeListener() {
    @Override
    public void propertyChange(PropertyChangeEvent pce) {
        // Handle the change here: the new value carries the path to the data file
        String pth = (String) pce.getNewValue();
        try {
            BufferedReader datafile = readDataFile(pth);
            Instances data = new Instances(datafile);
            data.setClassIndex(data.numAttributes() - 1);
            (...)
        } catch (Exception e) {
            // propertyChange cannot throw checked exceptions, so handle them here
            e.printStackTrace();
        }
    }
});
public static void LoadAndTest(String filename_test, String filename_model) throws Exception {
    BufferedReader datafile_test = readDataFile(filename_test);
    Instances data_test = new Instances(datafile_test);
    data_test.setClassIndex(data_test.numAttributes() - 1);

    // Deserialize the previously saved model
    Classifier cls = (Classifier) weka.core.SerializationHelper.read(filename_model);

    // Count correct predictions on the test set
    int act = 0;
    for (int i = 0; i < data_test.numInstances(); i++) {
        double pred = cls.classifyInstance(data_test.instance(i));
        double real = data_test.instance(i).classValue();
        if (pred == real) {
            act = act + 1;
        }
    }
    double pct = (double) act / (double) data_test.numInstances();
    System.out.println("Accuracy = " + pct);
}
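// Hypothetical invocation, assuming the ARFF test file and the serialized model
// already exist at these illustrative paths:
LoadAndTest("data/test.arff", "models/j48.model");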
/**
 * Transform - transform dataset D for this node.
 * this.j defines the current node index, e.g., 3
 * this.paY[] defines the parents, e.g., [1,4]
 * all other label variables, e.g., [0,2,5,...,L-1], are removed
 * @return a copy of D with all label attributes removed EXCEPT the current node and its parents
 */
public Instances transform(Instances D) throws Exception {
    int L = D.classIndex();
    d = D.numAttributes() - L;
    int keep[] = A.append(this.paY, j);   // keep all parents and self!
    Arrays.sort(keep);
    int remv[] = A.invert(keep, L);       // i.e., remove the rest < L
    Arrays.sort(remv);
    map = new int[L];
    for (int j = 0; j < L; j++) {
        map[j] = Arrays.binarySearch(keep, j);
    }
    Instances D_ = F.remove(new Instances(D), remv, false);
    D_.setClassIndex(map[this.j]);
    return D_;
}
@Override
public Instance transformInstance(Instance x) throws Exception {
    Instances tmpInst = new Instances(x.dataset());
    tmpInst.delete();
    tmpInst.add(x);
    Instances features = this.extractPart(tmpInst, false);

    Instances pseudoLabels = new Instances(this.compressedTemplateInst);
    Instance tmpin = pseudoLabels.instance(0);
    pseudoLabels.delete();
    pseudoLabels.add(tmpin);
    for (int i = 0; i < pseudoLabels.classIndex(); i++) {
        pseudoLabels.instance(0).setMissing(i);
    }

    Instances newDataSet = Instances.mergeInstances(pseudoLabels, features);
    newDataSet.setClassIndex(pseudoLabels.numAttributes());
    return newDataSet.instance(0);
}
/**
 * Container (bag) class holding the result of loading a classifier.
 */
private static class LoadedClassifier {
    private AbstractClassifier newClassifier = null;
    private Instances newHeader = null;
}
protected Instances convert(Instances D, int j, int k) {
    int L = D.classIndex();
    D = new Instances(D);
    D.insertAttributeAt(classAttribute, 0);
    D.setClassIndex(0);
    for (int i = 0; i < D.numInstances(); i++) {
        // build the compound class value from the values of labels j and k, e.g., "01", "10", ...
        String c = (int) Math.round(D.instance(i).value(j + 1)) + "" + (int) Math.round(D.instance(i).value(k + 1));
        D.instance(i).setClassValue(c);
    }
    for (int i = 0; i < L; i++) {
        D.deleteAttributeAt(1);
    }
    m_InstancesTemplate = new Instances(D, 0);
    return D;
}
public GroupFeature(List<FeatureExtractor> features) {
    this.features = ImmutableList.copyOf(features);
    ImmutableList.Builder<Attribute> result = ImmutableList.builder();
    for (FeatureExtractor fe : this.features) {
        for (Attribute att : fe.attributes()) {
            result.add((Attribute) att.copy());
        }
    }
    _attributes = result.build();
    _instances = new Instances("FOO", newArrayList(_attributes), 0);
    result = ImmutableList.builder();
    for (int i = 0; i < _instances.numAttributes(); i++) {
        result.add(_instances.attribute(i));
    }
    _attributes = result.build();
}
/**
 * performs a typical test
 */
public void testTypical() {
    Instances icopy = new Instances(m_Instances);
    m_Filter = getFilter();
    Instances result = useFilter();
    // the filter is expected to add exactly one attribute
    assertEquals(icopy.numAttributes() + 1, result.numAttributes());
}
public void testPruneMinFreq() throws Exception {
    Instances data1 = getData1();
    Instances structure = new Instances(data1, 0);

    DictionaryBuilder builder = new DictionaryBuilder();
    builder.setMinTermFreq(1);
    builder.setup(structure);
    for (int i = 0; i < data1.numInstances(); i++) {
        builder.processInstance(data1.instance(i));
    }
    assertEquals(15, builder.getDictionaries(false)[0].size());

    Map<String, int[]> consolidated = builder.finalizeDictionary();
    // min freq of 1 should keep all terms
    assertEquals(15, consolidated.size());
}
public static double[][] LEAD(Instances D, Classifier h, Random r, String MDType) throws Exception {
    // randomize and split into 60% train / 40% test
    Instances D_r = new Instances(D);
    D_r.randomize(r);
    Instances D_train = new Instances(D_r, 0, D_r.numInstances() * 60 / 100);
    Instances D_test = new Instances(D_r, D_train.numInstances(), D_r.numInstances() - D_train.numInstances());

    BR br = new BR();
    br.setClassifier(h);
    Result result = Evaluation.evaluateModel((MultiLabelClassifier) br, D_train, D_test, "PCut1", "1");
    return LEAD(D_test, result, MDType);
}
/**
 * applies the filter with no options set
 */
public void testDefault() {
    Instances icopy = new Instances(m_Instances);
    m_Filter = getFilter();
    Instances result = useFilter();
    assertEquals(icopy.numAttributes(), result.numAttributes());
}
Operator() {
    ArrayList<Attribute> a = new ArrayList<Attribute>();
    for (int i = 0; i < attrs.length - 1; i++) {
        a.add(new Attribute(attrs[i]));                   // numeric attribute
    }
    ArrayList<String> d = new ArrayList<String>();
    d.add("false");
    d.add("true");
    a.add(new Attribute(attrs[attrs.length - 1], d));     // nominal attribute
    data = new Instances("Buh", a, 0);
    data.setClassIndex(attrs.length - 1);                 // the CLASS
}
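// A minimal sketch (hypothetical values) of how the header built in Operator()
// could be populated; DenseInstance is Weka's standard Instance implementation.
double[] vals = new double[data.numAttributes()];
vals[0] = 1.5;                                            // first numeric attribute
vals[data.numAttributes() - 1] = data.classAttribute().indexOfValue("true");
data.add(new DenseInstance(1.0, vals));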
private Instances parseTransactionsMustContain(Instances data) {
    String[] split = m_transactionsMustContain.trim().split(",");
    boolean[] transactionsMustContainIndexes = new boolean[data.numAttributes()];
    int numInTransactionsMustContainList = split.length;

    for (String element : split) {
        String attName = element.trim();
        Attribute att = data.attribute(attName);
        if (att == null) {
            System.err.println("[FPGrowth] : WARNING - can't find attribute " + attName + " in the data.");
            numInTransactionsMustContainList--;
        } else {
            transactionsMustContainIndexes[att.index()] = true;
        }
    }

    if (numInTransactionsMustContainList == 0) {
        return data;
    } else {
        Instances newInsts = new Instances(data, 0);
        for (int i = 0; i < data.numInstances(); i++) {
            if (passesMustContain(data.instance(i), transactionsMustContainIndexes,
                    numInTransactionsMustContainList)) {
                newInsts.add(data.instance(i));
            }
        }
        newInsts.compactify();
        return newInsts;
    }
}
/**
 * Load the diabetes arff file
 *
 * @return Diabetes data as Instances
 * @throws Exception IO error.
 */
public static Instances loadDiabetes() throws Exception {
    Instances data = new Instances(new FileReader("src/test/resources/numeric/diabetes_numeric.arff"));
    data.setClassIndex(data.numAttributes() - 1);
    return data;
}
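// A hedged usage sketch: the diabetes dataset has a numeric class, so a regression
// scheme such as weka.classifiers.functions.LinearRegression can be built on it.
Instances diabetes = loadDiabetes();
LinearRegression lr = new LinearRegression();
lr.buildClassifier(diabetes);
System.out.println(lr);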