/**
 * Learns the parameters of a TAN structure using the {@link ParallelMLMissingData} algorithm.
 * The DAG is first learnt from the stream via {@code learnDAG}, then the parameters are
 * fitted to it; finally a Hugin inference engine is attached to the learnt model.
 * @param dataStream a stream of data instances for learning the parameters.
 * @param batchSize the size of the batch for the parallel ML algorithm.
 * @return a <code>BayesianNetwork</code> object in AMIDST format.
 * @throws ExceptionHugin if the underlying Hugin engine fails while building the model.
 */
public BayesianNetwork learn(DataStream<DataInstance> dataStream, int batchSize) throws ExceptionHugin {
    ParallelMLMissingData parameterLearningAlgorithm = new ParallelMLMissingData();
    parameterLearningAlgorithm.setWindowsSize(batchSize);
    parameterLearningAlgorithm.setParallelMode(this.parallelMode);
    // Structure learning happens first; the resulting DAG fixes the model shape.
    parameterLearningAlgorithm.setDAG(this.learnDAG(dataStream));
    parameterLearningAlgorithm.setDataStream(dataStream);
    // initLearning must be invoked before runLearning processes any data.
    parameterLearningAlgorithm.initLearning();
    parameterLearningAlgorithm.runLearning();
    learnedBN = parameterLearningAlgorithm.getLearntBayesianNetwork();
    // Keep an inference engine ready over the freshly learnt network.
    this.inference = new HuginInference();
    this.inference.setModel(this.learnedBN);
    return this.learnedBN;
}
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParameterLearningAlgorithm object with the MaximumLikehood builder ParameterLearningAlgorithm parameterLearningAlgorithm = new ParallelMaximumLikelihood(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(getNaiveBayesStructure(data,0)); //We should invoke this method before processing any data parameterLearningAlgorithm.initLearning(); //Then we show how we can perform parameter learnig by a sequential updating of data batches. for (DataOnMemory<DataInstance> batch : data.iterableOverBatches(100)){ parameterLearningAlgorithm.updateModel(batch); } //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
/**
 * Returns the Bayesian network learnt by whichever back-end is configured.
 * The LDA DAG is pushed into the learner before the network is requested.
 * The local learner takes precedence over the Flink one; {@code null} is
 * returned when neither back-end has been set.
 *
 * @return the learnt <code>BayesianNetwork</code>, or {@code null}
 */
public BayesianNetwork getModel() {
    if (learningAlgorithm != null) {
        this.learningAlgorithm.setDAG(plateauLDA.getDagLDA());
        return this.learningAlgorithm.getLearntBayesianNetwork();
    }
    if (learningAlgorithmFlink == null) {
        return null;
    }
    this.learningAlgorithmFlink.setDAG(plateauLDAFlink.getDagLDA());
    return this.learningAlgorithmFlink.getLearntBayesianNetwork();
}
/**
 * Updates the classifier with the given instance.
 *
 * <p>The single Weka instance is wrapped into a one-element AMIDST batch,
 * the parameter learner is updated with that batch, and the inference
 * engine is re-pointed at the refreshed model.
 *
 * @param instance the new training instance to include in the model
 * @exception Exception if the instance could not be incorporated in the
 * model.
 */
public void updateClassifier(Instance instance) throws Exception {
    // Diamond operator: the original used the raw type, which triggers an
    // unchecked-assignment warning; the element type is DataInstance.
    DataOnMemoryListContainer<DataInstance> batch_ = new DataOnMemoryListContainer<>(attributes_);
    batch_.add(new DataInstanceFromDataRow(new DataRowWeka(instance, this.attributes_)));
    parameterLearningAlgorithm_.updateModel(batch_);
    bnModel_ = parameterLearningAlgorithm_.getLearntBayesianNetwork();
    // Keep inference consistent with the newly learnt model.
    inferenceAlgorithm_.setModel(bnModel_);
}
/**
 * Updates the model with the instances of the given data stream, lazily
 * initialising the learning algorithm on first use.
 *
 * @param dataStream the data stream used to update the model
 * @return the value reported by the underlying learner's update step
 */
public double updateModel(DataStream<DataInstance> dataStream) {
    if (!initialized) {
        initLearning();
    }
    return this.learningAlgorithm.updateModel(dataStream);
}
/**
 * Returns the Bayesian network learnt by whichever back-end is configured:
 * the local learning algorithm takes precedence over the Flink one.
 *
 * @return the learnt <code>BayesianNetwork</code>, or {@code null} when
 *         neither learning back-end has been set
 */
public BayesianNetwork getModel() {
    if (learningAlgorithm != null) {
        return this.learningAlgorithm.getLearntBayesianNetwork();
    }
    // Braced for consistency with the branch above (original left it unbraced).
    if (learningAlgorithmFlink != null) {
        return this.learningAlgorithmFlink.getLearntBayesianNetwork();
    }
    return null;
}
/** * Constructor of a classifier which is initialized with the default arguments: * the last variable in attributes is the class variable and importance sampling * is the inference algorithm for making the predictions. * * @param attributes list of attributes of the classifier (i.e. its variables) * @throws WrongConfigurationException is thrown when the attributes passed are not suitable * for such classifier */ public HODE(Attributes attributes) throws WrongConfigurationException { super(attributes); this.setLearningAlgorithm(new ParallelMaximumLikelihood()); }
/** * Constructor of the classifier which is initialized with the default arguments: * the last variable in attributes is the class variable and importance sampling * is the inference algorithm for making the predictions. * @param attributes list of attributes of the classifier (i.e. its variables) * @throws WrongConfigurationException is thrown when the attributes passed are not suitable * for such classifier */ public NaiveBayesClassifier(Attributes attributes) throws WrongConfigurationException { super(attributes); this.setLearningAlgorithm(new ParallelMLMissingData()); }
/**
 * Builds a dynamic partial sufficient statistics object whose time-0 and
 * time-T components are both zero-initialised for the given dynamic network.
 *
 * @param ef_bayesianNetwork the exponential-family dynamic Bayesian network
 * @return a zero-initialised <code>DynamicPartialSufficientSatistics</code>
 */
public static DynamicPartialSufficientSatistics createZeroPartialSufficientStatistics(EF_DynamicBayesianNetwork ef_bayesianNetwork) {
    // Zero statistics for the time-0 slice of the network.
    eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics zeroTime0 =
            eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics
                    .createZeroPartialSufficientStatistics(ef_bayesianNetwork.getBayesianNetworkTime0());
    // Zero statistics for the transition (time-T) slice.
    eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics zeroTimeT =
            eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics
                    .createZeroPartialSufficientStatistics(ef_bayesianNetwork.getBayesianNetworkTimeT());
    return new DynamicPartialSufficientSatistics(zeroTime0, zeroTimeT);
}
/**
 * Builds a dynamic partial sufficient statistics object whose time-0 and
 * time-T components are initialised (as opposed to zeroed) for the given
 * dynamic network.
 *
 * @param ef_bayesianNetwork the exponential-family dynamic Bayesian network
 * @return an initialised <code>DynamicPartialSufficientSatistics</code>
 */
public static DynamicPartialSufficientSatistics createInitPartialSufficientStatistics(EF_DynamicBayesianNetwork ef_bayesianNetwork) {
    // Initial statistics for the time-0 slice of the network.
    eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics initTime0 =
            eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics
                    .createInitPartialSufficientStatistics(ef_bayesianNetwork.getBayesianNetworkTime0());
    // Initial statistics for the transition (time-T) slice.
    eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics initTimeT =
            eu.amidst.core.learning.parametric.ParallelMLMissingData.PartialSufficientSatistics
                    .createInitPartialSufficientStatistics(ef_bayesianNetwork.getBayesianNetworkTimeT());
    return new DynamicPartialSufficientSatistics(initTime0, initTimeT);
}
/**
 * Aggregates the time-0 and time-T compound vectors into a single dynamic
 * compound vector with both time indicators switched on (set to 1).
 *
 * @return the aggregated <code>DynamiceBNCompoundVector</code>
 */
public EF_DynamicBayesianNetwork.DynamiceBNCompoundVector getCompoundVector() {
    EF_DynamicBayesianNetwork.DynamiceBNCompoundVector compound =
            new EF_DynamicBayesianNetwork.DynamiceBNCompoundVector(
                    this.getTime0().getCompoundVector(),
                    this.getTimeT().getCompoundVector());
    // Mark both slices as present in the aggregated vector.
    compound.setIndicatorTime0(1);
    compound.setIndicatorTimeT(1);
    return compound;
}
}
/**
 * Computes the count sufficient statistics for one dynamic data instance,
 * dispatching on its time index: instances at time 0 feed the time-0 slice
 * of the network, all later instances feed the transition (time-T) slice.
 *
 * @param bn           the exponential-family dynamic Bayesian network
 * @param dataInstance the instance whose counts are accumulated
 * @return the partial sufficient statistics for the matching time slice
 */
private static DynamicPartialSufficientSatistics computeCountSufficientStatistics(EF_DynamicBayesianNetwork bn, DynamicDataInstance dataInstance) {
    boolean atTimeZero = dataInstance.getTimeID() == 0;
    return atTimeZero
            ? DynamicPartialSufficientSatistics.createPartialSufficientStatisticsTime0(
                    eu.amidst.core.learning.parametric.ParallelMLMissingData
                            .computeCountSufficientStatistics(bn.getBayesianNetworkTime0(), dataInstance))
            : DynamicPartialSufficientSatistics.createPartialSufficientStatisticsTimeT(
                    eu.amidst.core.learning.parametric.ParallelMLMissingData
                            .computeCountSufficientStatistics(bn.getBayesianNetworkTimeT(), dataInstance));
}
/**
 * Learns the parameters of a TAN structure using the
 * {@link eu.amidst.core.learning.parametric.ParallelMLMissingData} algorithm.
 * The DAG is first learnt from the stream via {@code learnDAG}, then the parameters are
 * fitted to it; finally a Hugin inference engine is attached to the learnt model.
 * @param dataStream a stream of data instances for learning the parameters.
 * @param batchSize the size of the batch for the parallel ML algorithm.
 * @return a <code>BayesianNetwork</code> object in AMIDST format.
 * @throws ExceptionHugin if the underlying Hugin engine fails while building the model.
 */
public BayesianNetwork learn(DataStream<DataInstance> dataStream, int batchSize) throws ExceptionHugin {
    ParallelMLMissingData parameterLearningAlgorithm = new ParallelMLMissingData();
    parameterLearningAlgorithm.setWindowsSize(batchSize);
    parameterLearningAlgorithm.setParallelMode(this.parallelMode);
    // Structure learning happens first; the resulting DAG fixes the model shape.
    parameterLearningAlgorithm.setDAG(this.learnDAG(dataStream));
    parameterLearningAlgorithm.setDataStream(dataStream);
    // initLearning must be invoked before runLearning processes any data.
    parameterLearningAlgorithm.initLearning();
    parameterLearningAlgorithm.runLearning();
    learnedBN = parameterLearningAlgorithm.getLearntBayesianNetwork();
    // Keep an inference engine ready over the freshly learnt network.
    this.inference = new HuginInference();
    this.inference.setModel(this.learnedBN);
    return this.learnedBN;
}
/**
 * Updates the model with one in-memory batch of instances, lazily
 * initialising the learning algorithm on first use.
 *
 * @param datBatch the batch of instances used to update the model
 * @return the value reported by the underlying learner's update step
 */
public double updateModel(DataOnMemory<DataInstance> datBatch) {
    if (!initialized) {
        initLearning();
    }
    return learningAlgorithm.updateModel(datBatch);
}
/**
 * Returns the learnt posterior distribution of the variable with the given
 * name, taken from whichever learning back-end is configured (local first,
 * then Flink); {@code null} when neither back-end has been set.
 *
 * @param varName the name of the variable to look up in the DAG
 * @param <E>     the concrete univariate distribution type expected by the caller
 * @return the variable's distribution in the learnt network, or {@code null}
 */
public <E extends UnivariateDistribution> E getPosteriorDistribution(String varName) {
    if (learningAlgorithm != null) {
        // Unchecked cast: the caller chooses the concrete distribution type E.
        return (E) this.learningAlgorithm.getLearntBayesianNetwork()
                .getConditionalDistribution(dag.getVariables().getVariableByName(varName));
    }
    if (learningAlgorithmFlink != null) {
        return (E) this.learningAlgorithmFlink.getLearntBayesianNetwork()
                .getConditionalDistribution(dag.getVariables().getVariableByName(varName));
    }
    return null;
}
/** * Constructor of a classifier which is initialized with the default arguments: * the last variable in attributes is the class variable and importance sampling * is the inference algorithm for making the predictions. * * @param attributes list of attributes of the classifier (i.e. its variables) * @throws WrongConfigurationException is thrown when the attributes passed are not suitable * for such classifier */ public AODE(Attributes attributes) throws WrongConfigurationException { super(attributes); this.setLearningAlgorithm(new ParallelMaximumLikelihood()); classVariables = new ArrayList<>(); }
/**
 * Accumulates the statistics of {@code a} into {@code b} in place and returns
 * {@code b} ("non-stateless": the second argument is mutated). For each time
 * slice independently: if {@code b} holds no statistics yet, it adopts
 * {@code a}'s reference directly; otherwise {@code a}'s statistics are summed
 * into {@code b}'s (and skipped entirely when {@code a} has none).
 *
 * @param a the statistics to fold in (never mutated by this method)
 * @param b the accumulator, mutated in place
 * @return {@code b}, after accumulation
 */
public static DynamicPartialSufficientSatistics sumNonStateless(DynamicPartialSufficientSatistics a, DynamicPartialSufficientSatistics b) {
    // time0 slice: adopt a's reference when b has none, otherwise sum in place.
    if (b.getTime0() == null) b.time0 = a.getTime0();
    else if (a.getTime0() != null) b.getTime0().sum(a.getTime0());
    // timeT slice: same adopt-or-sum logic.
    if (b.getTimeT() == null) b.timeT = a.getTimeT();
    else if (a.getTimeT() != null) b.getTimeT().sum(a.getTimeT());
    return b;
}