// Class-probability distribution for the test instance at index s1.
// NOTE(review): `model`, `test`, and `s1` are defined elsewhere — this is a
// fragment, not a complete definition, as seen from this chunk.
double[] prediction=model.distributionForInstance(test.get(s1));
/**
 * Makes the instance stored at position {@code i} of the retained dataset the
 * current instance.
 *
 * @param i the index of the instance to select
 * @throws UnsupportedOperationException if the full dataset has not been
 *           retained in memory
 */
public void setInstance(int i) {
  if (dataRetained) {
    // Delegate to the Instance-based overload.
    setInstance(dataset.get(i));
  } else {
    throw new UnsupportedOperationException(
        "Unable to set the instance based "
            + "on index because the dataset has not been retained in memory");
  }
}
/**
 * Sets the instance at index i of the supplied dataset to be the current
 * instance
 *
 * @param i the index of the instance to be set
 * @throws UnsupportedOperationException if the full dataset has not been
 *           retained in memory
 */
public void setInstance(int i) {
  // Index-based selection only works when the whole dataset was kept in memory.
  if (!dataRetained) {
    throw new UnsupportedOperationException(
        "Unable to set the instance based "
            + "on index because the dataset has not been retained in memory");
  }
  // Delegate to the Instance-based overload.
  setInstance(dataset.get(i));
}
/**
 * Appends every instance of the second dataset to the first one, merging the
 * two datasets in place.
 *
 * @param first first (and destination) dataset
 * @param second second dataset
 */
public void mergeDataInPlace(Instances first, Instances second) {
  final int count = second.numInstances();
  for (int idx = 0; idx < count; idx++) {
    first.add(second.get(idx));
  }
}
/**
 * Merge two datasets of Weka instances in place.
 *
 * @param first first (and destination) dataset; every instance of
 *          {@code second} is appended to it
 * @param second second dataset; only read, never modified
 */
public void mergeDataInPlace(Instances first, Instances second) {
  // Append each instance of the second dataset to the first.
  for(int i=0; i<second.numInstances(); i++)
    first.add(second.get(i));
}
/** * Create leaf node based on the current split data * * @param data pointer to original data * @param indices indices at this node */ public LeafNode( final Instances data, ArrayList<Integer> indices) { this.probability = new double[ data.numClasses() ]; for(final Integer it : indices) { this.probability[ (int) data.get( it.intValue() ).classValue()] ++; } // Divide by the number of elements for(int i=0; i<data.numClasses(); i++) this.probability[i] /= (double) indices.size(); }
/**
 * Create leaf node based on the current split data.
 *
 * @param data pointer to original data
 * @param indices indices at this node
 */
public LeafNode( final Instances data, ArrayList<Integer> indices) {
  // One slot per class; all counts start at zero.
  this.probability = new double[ data.numClasses() ];
  // Count how many instances of each class reached this leaf.
  for(final Integer it : indices) {
    this.probability[ (int) data.get( it.intValue() ).classValue()] ++;
  }
  // Divide by the number of elements
  // NOTE(review): if `indices` is empty this divides by zero and fills the
  // array with NaN — confirm callers never construct a leaf from an empty list.
  for(int i=0; i<data.numClasses(); i++)
    this.probability[i] /= (double) indices.size();
}
@Override public double getSimilarity(JCas jcas1, JCas jcas2, Annotation coveringAnnotation1, Annotation coveringAnnotation2) throws SimilarityException { // The feature generation needs to have happened before! DocumentMetaData md = DocumentMetaData.get(jcas1); int id = Integer.parseInt(md.getDocumentId().substring(md.getDocumentId().indexOf("-") + 1)); System.out.println(id); Instance testInst = test.get(id - 1); try { return filteredClassifier.classifyInstance(testInst); } catch (Exception e) { throw new SimilarityException(e); } } }
/**
 * Returns the precomputed classifier prediction for this document pair as the
 * similarity score.
 */
@Override
public double getSimilarity(JCas jcas1, JCas jcas2,
    Annotation coveringAnnotation1, Annotation coveringAnnotation2)
    throws SimilarityException {
  // The feature generation needs to have happened before!
  DocumentMetaData md = DocumentMetaData.get(jcas1);
  // Document ids look like "<prefix>-<n>"; the part after the first '-' is
  // parsed as a 1-based index into the test data.
  int id = Integer.parseInt(md.getDocumentId().substring(md.getDocumentId().indexOf("-") + 1));
  System.out.println(id); // NOTE(review): debug output — consider removing or routing through a logger.
  Instance testInst = test.get(id - 1);
  try {
    return filteredClassifier.classifyInstance(testInst);
  } catch (Exception e) {
    // Surface classifier failures via the interface's declared exception type.
    throw new SimilarityException(e);
  }
}
}
@Override public double getSimilarity(JCas jcas1, JCas jcas2, Annotation coveringAnnotation1, Annotation coveringAnnotation2) throws SimilarityException { // The feature generation needs to have happened before! DocumentMetaData md = DocumentMetaData.get(jcas1); int id = Integer.parseInt(md.getDocumentId().substring(md.getDocumentId().indexOf("-") + 1)); System.out.println(id); Instance testInst = test.get(id - 1); try { return filteredClassifier.classifyInstance(testInst); } catch (Exception e) { throw new SimilarityException(e); } } }
@Override public double getSimilarity(JCas jcas1, JCas jcas2, Annotation coveringAnnotation1, Annotation coveringAnnotation2) throws SimilarityException { // The feature generation needs to have happened before! DocumentMetaData md = DocumentMetaData.get(jcas1); int id = Integer.parseInt(md.getDocumentId()); System.out.println(id); Instance testInst = test.get(id - 1); try { return filteredClassifier.classifyInstance(testInst); } catch (Exception e) { throw new SimilarityException(e); } } }
@Override public double getSimilarity(JCas jcas1, JCas jcas2, Annotation coveringAnnotation1, Annotation coveringAnnotation2) throws SimilarityException { // The feature generation needs to have happened before! DocumentMetaData md = DocumentMetaData.get(jcas1); int id = Integer.parseInt(md.getDocumentId()); System.out.println(id); Instance testInst = test.get(id - 1); try { return filteredClassifier.classifyInstance(testInst); } catch (Exception e) { throw new SimilarityException(e); } } }
/**
 * Returns the precomputed classifier prediction for this document pair as the
 * similarity score.
 */
@Override
public double getSimilarity(JCas jcas1, JCas jcas2,
    Annotation coveringAnnotation1, Annotation coveringAnnotation2)
    throws SimilarityException {
  // The feature generation needs to have happened before!
  DocumentMetaData md = DocumentMetaData.get(jcas1);
  // The whole document id is parsed as a 1-based index into the test data.
  int id = Integer.parseInt(md.getDocumentId());
  System.out.println(id); // NOTE(review): debug output — consider removing or routing through a logger.
  Instance testInst = test.get(id - 1);
  try {
    return filteredClassifier.classifyInstance(testInst);
  } catch (Exception e) {
    // Surface classifier failures via the interface's declared exception type.
    throw new SimilarityException(e);
  }
}
}
public Clustering getClusteringResult() { Clustering clustering = null; weka.core.Instances wekaInstances= this.instanceConverter.wekaInstances(instances); try { clusterer.buildClusterer(wekaInstances); int numClusters = clusterer.numberOfClusters(); Instances dataset = getDataset(instances.numAttributes(), numClusters); List<Instance> newInstances = new ArrayList<Instance>() ; //Instances(dataset); for (int i = 0; i < wekaInstances.numInstances(); i++) { weka.core.Instance inst = wekaInstances.get(i); int cnum = clusterer.clusterInstance(inst); Instance newInst = new DenseInstance(instances.instance(cnum)); newInst.insertAttributeAt(inst.numAttributes()); newInst.setDataset(dataset); newInst.setClassValue(cnum); newInstances.add(newInst); } clustering = new Clustering(newInstances); } catch (Exception e) { e.printStackTrace(); } instances = null; return clustering; }
/**
 * Test getDataSetIterator: every one-hot label the iterator emits must match
 * the nominal class value of the corresponding instance, and the iterator's
 * label set must equal the set seen in the data.
 */
@Test
public void testGetIteratorNominalClass() throws Exception {
  final Instances data = DatasetLoader.loadAngerMetaClassification();
  final int batchSize = 1;
  final DataSetIterator it = this.cteii.getDataSetIterator(data, SEED, batchSize);

  Set<Integer> labels = new HashSet<>();
  for (int i = 0; i < data.size(); i++) {
    final Instance inst = data.get(i);
    final int expectedLabel = Integer.parseInt(inst.stringValue(data.classIndex()));
    final DataSet next = it.next();
    final int actualLabel = next.getLabels().argMax().getInt(0);
    Assert.assertEquals(expectedLabel, actualLabel);
    labels.add(expectedLabel);
  }

  // The iterator must report exactly the two labels present in the data.
  final Set<Integer> collect = it.getLabels().stream()
      .map(s -> Double.valueOf(s).intValue())
      .collect(Collectors.toSet());
  Assert.assertEquals(2, labels.size());
  Assert.assertTrue(labels.containsAll(collect));
  Assert.assertTrue(collect.containsAll(labels));
}
/**
 * Asserts that the activations produced by a Dl4jMlpFilter at the given layer
 * match the activations queried directly from the classifier.
 */
protected void checkLayer(Dl4jMlpClassifier clf, Instances iris,
    String transformationLayerName, String clfPath) throws Exception {
  Instances activationsExpected = clf.getActivationsAtLayer(transformationLayerName, iris);

  Dl4jMlpFilter filter = new Dl4jMlpFilter();
  filter.setModelFile(new File(clfPath));
  filter.setTransformationLayerName(transformationLayerName);
  filter.setInputFormat(iris);
  Instances activationsActual = Filter.useFilter(iris, filter);

  for (int row = 0; row < activationsActual.size(); row++) {
    Instance expected = activationsExpected.get(row);
    Instance actual = activationsActual.get(row);
    for (int col = 0; col < expected.numAttributes(); col++) {
      assertEquals(expected.value(col), actual.value(col), 1e-6);
    }
  }
}
}
/**
 * tests the data whether the filter can actually handle it.
 *
 * @param instanceInfo the data to test
 * @throws Exception if the test fails
 */
@Override
protected void testInputFormat(Instances instanceInfo) throws Exception {
  for (int idx = 0; idx < getRanges().length; idx++) {
    // Miniature dataset: the header plus (at most) one sample instance.
    Instances sample = new Instances(instanceInfo, 0);
    if (instanceInfo.size() > 0) {
      sample.add((Instance) instanceInfo.get(0).copy());
    }
    Range range = getRanges()[idx];
    range.setUpper(instanceInfo.numAttributes() - 1);
    Instances subset = generateSubset(sample, range);
    getFilters()[idx].setInputFormat(subset);
  }
}
/**
 * Test getDataSetIterator: every label the iterator emits must equal the
 * numeric class value of the corresponding instance.
 */
@Test
public void testGetIteratorNumericClass() throws Exception {
  final Instances data = DatasetLoader.loadAngerMeta();
  final int batchSize = 1;
  final DataSetIterator it = this.cteii.getDataSetIterator(data, SEED, batchSize);

  Set<Double> labels = new HashSet<>();
  for (int i = 0; i < data.size(); i++) {
    final Instance inst = data.get(i);
    final double expected = inst.value(data.classIndex());
    final DataSet next = it.next();
    final double actual = next.getLabels().getDouble(0);
    Assert.assertEquals(expected, actual, 1e-5);
    labels.add(expected);
  }
}
/**
 * tests the data whether the filter can actually handle it.
 *
 * @param instanceInfo the data to test
 * @throws Exception if the test fails
 */
@Override
protected void testInputFormat(Instances instanceInfo) throws Exception {
  for (int i = 0; i < getRanges().length; i++) {
    // Header-only copy of the input; add at most one instance as a sample.
    Instances newi = new Instances(instanceInfo, 0);
    if (instanceInfo.size() > 0) {
      newi.add((Instance) instanceInfo.get(0).copy());
    }
    // Expand the i-th range to cover the full attribute span before subsetting.
    Range range = getRanges()[i];
    range.setUpper(instanceInfo.numAttributes() - 1);
    Instances subset = generateSubset(newi, range);
    // Each sub-filter must accept its attribute subset, else this throws.
    getFilters()[i].setInputFormat(subset);
  }
}
/**
 * Test getDataSetIterator: each label the iterator emits must equal the
 * instance's numeric class value.
 */
@Test
public void testGetIteratorNumericClass() throws Exception {
  final Instances data = makeData();
  final int batchSize = 1;
  final DataSetIterator it = this.cteii.getDataSetIterator(data, SEED, batchSize);
  Set<Double> labels = new HashSet<>();
  for (int i = 0; i < data.size(); i++) {
    Instance inst = data.get(i);
    // Expected label read straight from the dataset's class attribute.
    double label = inst.value(data.classIndex());
    final DataSet next = it.next();
    double itLabel = next.getLabels().getDouble(0);
    Assert.assertEquals(label, itLabel, 1e-5);
    // NOTE(review): `labels` is collected but never asserted on in this test.
    labels.add(label);
  }
}