/**
 * Copies {@code num} consecutive instances, starting at index {@code from},
 * into the destination dataset.
 *
 * @param from index of the first instance to copy
 * @param dest the dataset receiving the copies
 * @param num  how many consecutive instances to copy
 */
protected void copyInstances(int from, Instances dest, int num) {
    int end = from + num;
    for (int index = from; index < end; index++) {
        dest.add(instance(index));
    }
}
/**
 * Trains on a single instance, lazily creating the backing dataset
 * from the instance's attribute count on first use.
 *
 * @param inst the instance to accumulate
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    if (this.instances == null) {
        // First instance seen: build an empty dataset with a matching header.
        this.instances = getDataset(inst.numAttributes(), 0);
    }
    this.instances.add(inst);
}
/** * Read instance. * * @param fileReader the file reader * @return true, if successful */ public boolean readInstance(Reader fileReader) { //ArffReader arff = new ArffReader(reader, this, m_Lines, 1); Instance inst = arff.readInstance(); if (inst != null) { inst.setDataset(this); add(inst); return true; } else { return false; } }
public boolean readInstance(Reader fileReader) { // ArffReader arff = new ArffReader(reader, this, m_Lines, 1); if (arff == null) { arff = new ArffLoader(fileReader,0,this.classAttribute); } Instance inst = arff.readInstance(fileReader); if (inst != null) { inst.setDataset(this); add(inst); return true; } else { return false; } }
/**
 * Trains on one instance: updates the class distribution counts, buffers
 * the instance in the current chunk, and processes the chunk whenever
 * {@code chunkSize} instances have accumulated.
 *
 * @param inst the training instance
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    initVariables();
    int label = (int) inst.classValue();
    this.classDistributions[label]++;
    this.currentChunk.add(inst);
    this.processedInstances++;
    // Process a full chunk every chunkSize instances.
    boolean chunkComplete = this.processedInstances % this.chunkSize == 0;
    if (chunkComplete) {
        processChunk();
    }
}
/**
 * Creates a new leaf <code>CNode</code> instance holding a single instance.
 *
 * @param numAttributes the number of attributes in the data
 * @param leafInstance  the instance to store at this leaf
 */
public CNode(int numAttributes, Instance leafInstance) {
    this(numAttributes);
    if (m_clusterInstances == null) {
        // Lazily create the per-leaf dataset using the instance's header.
        m_clusterInstances = new Instances(leafInstance.dataset(), 1);
    }
    m_clusterInstances.add(leafInstance);
    updateStats(leafInstance, false);
}
/**
 * Trains on one instance: updates the class distribution counts, buffers
 * the instance in the current chunk, and processes the chunk whenever the
 * configured chunk size has been reached.
 *
 * @param inst the training instance
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    initVariables();
    int label = (int) inst.classValue();
    this.classDistributions[label]++;
    this.currentChunk.add(inst);
    this.processedInstances++;
    // Process a full chunk every chunkSizeOption instances.
    boolean chunkComplete = this.processedInstances % this.chunkSizeOption.getValue() == 0;
    if (chunkComplete) {
        processChunk();
    }
}
/** * Samoa instances from weka instances. * * @param instances the instances * @return the instances */ public Instances samoaInstances(weka.core.Instances instances) { Instances samoaInstances = samoaInstancesInformation(instances); //We assume that we have only one samoaInstanceInformation for WekaToSamoaInstanceConverter this.samoaInstanceInformation = samoaInstances; for (int i = 0; i < instances.numInstances(); i++) { samoaInstances.add(samoaInstance(instances.instance(i))); } return samoaInstances; }
/**
 * Trains on one instance: tracks the highest class index seen, lazily
 * creates the window from the instance's header, probabilistically evicts
 * stored instances, then appends the new instance.
 *
 * @param inst the training instance
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    if (inst.classValue() > C) {
        C = (int) inst.classValue();
    }
    if (this.window == null) {
        this.window = new Instances(inst.dataset());
    }
    // Iterate backwards so a deletion does not shift the indices of
    // elements not yet examined. The original forward loop skipped the
    // element immediately following each deleted one.
    for (int i = this.window.size() - 1; i >= 0; i--) {
        if (this.classifierRandom.nextDouble() > this.prob) {
            this.window.delete(i);
        }
    }
    this.window.add(inst);
}
// Store the instance, record its arrival time stamp in parallel, then advance the clock.
this.window.add(inst); this.timeStamp.add(this.time); this.time++;
/**
 * Trains on one instance while maintaining a ring buffer of the most
 * recent {@code tau_size} instances, and takes a snapshot at chunk
 * boundaries.
 *
 * @param inst the training instance
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    if (recentChunk == null) {
        recentChunk = new Instances(this.getModelContext());
    }
    if (recentChunk.size() < this.tau_size) {
        // Buffer still filling up: simply append.
        recentChunk.add(inst);
    } else {
        // Buffer full: overwrite the slot for the current position.
        recentChunk.set(this.nbInstances % this.tau_size, inst);
    }
    trainAndClassify(inst);
    // Recomputed after trainAndClassify, which may advance nbInstances.
    if ((this.nbInstances % this.tau_size) == 0) {
        takeSnapshot();
    }
}
/**
 * Adds the supplied node as a child of this node, absorbing all of the
 * child's instances into this node's instance set and statistics.
 *
 * @param child the child to add
 */
protected void addChildNode(CNode child) {
    // Absorb every instance held by the child into this node.
    int childCount = child.m_clusterInstances.numInstances();
    for (int i = 0; i < childCount; i++) {
        Instance absorbed = child.m_clusterInstances.instance(i);
        m_clusterInstances.add(absorbed);
        updateStats(absorbed, false);
    }
    if (m_children == null) {
        m_children = new FastVector();
    }
    m_children.addElement(child);
}
/**
 * Makes sure that the STM and LTM combined do not surpass the maximum size.
 */
private void memorySizeCheck(){
    if (this.stm.numInstances() + this.ltm.numInstances() > this.maxSTMSize + this.maxLTMSize){
        if (this.ltm.numInstances() > this.maxLTMSize){
            // The LTM itself is over budget: compress it by clustering.
            this.clusterDown();
        }else{
            //shift values from STM directly to LTM since STM is full
            // NOTE(review): numShifts is (maxLTMSize - current LTM size + 1);
            // presumably this moves exactly enough instances to bring the STM
            // back under budget — confirm the +1 against the size invariants.
            int numShifts = this.maxLTMSize - this.ltm.numInstances() + 1;
            for (int i = 0; i < numShifts; i++){
                // Move the oldest STM instance (index 0) into the LTM and drop
                // the matching head entries of the per-memory histories.
                this.ltm.add(this.stm.get(0).copy());
                this.stm.delete(0);
                this.stmHistory.remove(0);
                this.ltmHistory.remove(0);
                this.cmHistory.remove(0);
            }
            this.clusterDown();
            this.predictionHistories.clear();
            // Shift the cached pairwise-distance matrix so entry (i, j) again
            // corresponds to the instances remaining in the STM after the
            // numShifts removals above.
            for (int i = 0; i < this.stm.numInstances(); i++){
                for (int j = 0; j < this.stm.numInstances(); j++){
                    this.distanceMatrixSTM[i][j] = this.distanceMatrixSTM[numShifts+i][numShifts+j];
                }
            }
        }
    }
}
// Build an instance from the given attribute values, attach it to the LTM's
// header, and store it. NOTE(review): the first constructor argument is
// presumably the instance weight (1) — confirm against InstanceImpl.
Instance inst = new InstanceImpl(1, attributes);
inst.setDataset(this.ltm);
this.ltm.add(inst);
/**
 * Trains on one instance using a bounded sliding window: once the window
 * holds the configured limit of instances, the oldest one is evicted
 * before the new instance is appended.
 *
 * @param inst the training instance
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    if (inst.classValue() > C) {
        C = (int) inst.classValue();
    }
    if (this.window == null) {
        this.window = new Instances(inst.dataset());
    }
    // Evict the oldest instance when the window is at capacity.
    if (this.window.numInstances() >= this.limitOption.getValue()) {
        this.window.delete(0);
    }
    this.window.add(inst);
}
@Override public Example<Instance> nextInstance() { // a value between 0.0 and 1.0 uniformly distributed double p = random.nextDouble(); int iClass = -1; // loops over all class probabilities to see from which class the next instance should be from while(p > 0.0){ iClass++; p -= probPerClass[iClass]; } // keeps on creating and storing instances until we have an instance for the desired class while(instancesBuffer[iClass].size() == 0){ Example<Instance> inst = originalStream.nextInstance(); instancesBuffer[(int) inst.getData().classValue()].add(inst.getData()); } // retrieves the instance from the desired class Instance instance = instancesBuffer[iClass].get(0); // and also removes it from the buffer instancesBuffer[iClass].delete(0); return new InstanceExample(instance); }
/**
 * Exports the current clustering to the Weka explorer as a dataset of
 * cluster centers (one weighted instance per cluster).
 */
public void weka() {
    try{
        // Probe for weka on the classpath before doing any work.
        Class.forName("weka.gui.Logger");
    } catch (Exception e){
        m_logPanel.addText("Please add weka.jar to the classpath to use the Weka explorer.");
        return;
    }
    Clustering wekaClustering;
    // NOTE(review): wekaClustering is assigned null and never given a real
    // value, so the null check below always triggers and everything after it
    // is unreachable dead code. Presumably it should be populated from the
    // active clusterer's micro/macro clustering — confirm and fix.
    wekaClustering = null;
    if(wekaClustering == null || wekaClustering.size()==0){
        m_logPanel.addText("Empty Clustering");
        return;
    }
    // Unreachable below (see note above): one numeric attribute per center
    // dimension, then one instance per cluster.
    int dims = wekaClustering.get(0).getCenter().length;
    FastVector attributes = new FastVector();
    for(int i = 0; i < dims; i++)
        attributes.addElement( new Attribute("att" + i) );
    Instances instances = new Instances("trainset",attributes,0);
    for(int c = 0; c < wekaClustering.size(); c++){
        Cluster cluster = wekaClustering.get(c);
        Instance inst = new DenseInstance(cluster.getWeight(), cluster.getCenter());
        inst.setDataset(instances);
        instances.add(inst);
    }
    WekaExplorer explorer = new WekaExplorer(instances);
}
/**
 * Exports the selected clustering (the micro-clustering when the clusterer
 * provides one and its evaluation option is set, otherwise the macro
 * clustering) to the Weka explorer as a dataset of weighted cluster centers.
 */
public void weka() {
    try {
        // Probe for weka on the classpath before doing any work.
        Class.forName("weka.gui.Logger");
    } catch (Exception e) {
        m_logPanel.addText("Please add weka.jar to the classpath to use the Weka explorer.");
        return;
    }
    Clustering wekaClustering;
    if (m_clusterer0.implementsMicroClusterer() && m_clusterer0.evaluateMicroClusteringOption.isSet()) {
        wekaClustering = micro0;
    } else {
        wekaClustering = macro0;
    }
    if (wekaClustering == null || wekaClustering.size() == 0) {
        m_logPanel.addText("Empty Clustering");
        return;
    }
    // One numeric attribute per dimension of the cluster centers.
    int dims = wekaClustering.get(0).getCenter().length;
    FastVector attributes = new FastVector();
    for (int i = 0; i < dims; i++) {
        attributes.addElement(new Attribute("att" + i));
    }
    Instances instances = new Instances("trainset", attributes, 0);
    // One instance per cluster: its center as values, its weight as the
    // instance weight.
    for (int c = 0; c < wekaClustering.size(); c++) {
        Cluster cluster = wekaClustering.get(c);
        Instance inst = new DenseInstance(cluster.getWeight(), cluster.getCenter());
        inst.setDataset(instances);
        instances.add(inst);
    }
    // Constructed for its side effect of opening the explorer window; the
    // local reference itself was unused.
    new WekaExplorer(instances);
}
/**
 * Caches up to the configured maximum number of instances from the source
 * stream, shuffles the cache with the configured seed, and returns a stream
 * over the shuffled cache.
 *
 * @param monitor    task monitor used for progress reporting and abort checks
 * @param repository object repository (unused)
 * @return a {@code CachedInstancesStream} over the shuffled cache, or
 *         {@code null} if the task was aborted
 */
@Override
protected Object doTaskImpl(TaskMonitor monitor, ObjectRepository repository) {
    InstanceStream stream = (InstanceStream) getPreparedClassOption(this.streamOption);
    Instances cache = new Instances(stream.getHeader(), 0);
    monitor.setCurrentActivity("Caching instances...", -1.0);
    while (cache.numInstances() < this.maximumCacheSizeOption.getValue()
            && stream.hasMoreInstances()) {
        cache.add(stream.nextInstance().getData());
        // Only check for abort / update progress every few instances.
        if (cache.numInstances() % MainTask.INSTANCES_BETWEEN_MONITOR_UPDATES == 0) {
            if (monitor.taskShouldAbort()) {
                return null;
            }
            // Progress estimate bounded by whichever is smaller: the
            // stream's own estimate or the remaining cache capacity.
            long remaining = stream.estimatedRemainingInstances();
            long capacityLeft = this.maximumCacheSizeOption.getValue() - cache.numInstances();
            if (remaining < 0 || capacityLeft < remaining) {
                remaining = capacityLeft;
            }
            double fraction = remaining < 0
                    ? -1.0
                    : (double) cache.numInstances() / (double) (cache.numInstances() + remaining);
            monitor.setCurrentActivityFractionComplete(fraction);
        }
    }
    monitor.setCurrentActivity("Shuffling instances...", -1.0);
    cache.randomize(new Random(this.shuffleRandomSeedOption.getValue()));
    return new CachedInstancesStream(cache);
}
// Collect the instance at the next position of the index array.
// NOTE(review): indices presumably holds nearest-neighbour ordering — confirm at caller.
neighbours.add(m_Instances.instance(indices[idx]));