public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a SVB object SVB parameterLearningAlgorithm = new SVB(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(),"H",2)); //We fix the size of the window, which must be equal to the size of the data batches we use for learning parameterLearningAlgorithm.setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We should invoke this method before processing any data parameterLearningAlgorithm.initLearning(); //Then we show how we can perform parameter learning by a sequential updating of data batches. for (DataOnMemory<DataInstance> batch : data.iterableOverBatches(100)){ double log_likelhood_of_batch = parameterLearningAlgorithm.updateModel(batch); System.out.println("Log-Likelihood of Batch: "+ log_likelhood_of_batch); } //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
public static void main(String[] args) throws IOException, ClassNotFoundException { int batchSize = 100; DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff");; //We can load a Bayesian network using the static class BayesianNetworkLoader DAG dag = DAGGenerator.getNaiveBayesStructure(data.getAttributes(), "B"); BayesianNetwork bn = new BayesianNetwork(dag); data.getAttributes().forEach(attribute -> System.out.println(attribute.getName())); //Now we print the loaded model System.out.println(bn.toString()); EF_BayesianNetwork efbn = new EF_BayesianNetwork(bn); SufficientStatistics sumSS = data.parallelStream(batchSize) .map(efbn::getSufficientStatistics) //see Program 6 .reduce(SufficientStatistics::sumVectorNonStateless).get(); //.reduce((v1,v2) -> {v1.sum(v2); return v1;}).get(); sumSS.divideBy(data.stream().count()); for(int i=0; i<sumSS.size(); i++) { System.out.println(sumSS.get(i)); } }
// Build a Naive Bayes DAG from the attributes of the Spark data set;
// "W" presumably names the class variable — confirm against DAGGenerator's javadoc.
DAG dag = DAGGenerator.getNaiveBayesStructure(dataSpark.getAttributes(), "W");
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a SVB object SVB parameterLearningAlgorithm = new SVB(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(),"GlobalHidden", 2)); //We fix the size of the window parameterLearningAlgorithm.setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
// Build a Naive Bayes DAG from the attributes of the Flink data set and print it;
// "DiscreteVar4" presumably names the class variable — confirm against DAGGenerator's javadoc.
DAG dag = DAGGenerator.getNaiveBayesStructure(dataFlink.getAttributes(), "DiscreteVar4"); System.out.println(dag.toString());
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParallelSVB object ParallelSVB parameterLearningAlgorithm = new ParallelSVB(); //We fix the number of cores we want to exploit parameterLearningAlgorithm.setNCores(4); //We fix the DAG structure, which is a Naive Bayes with a global latent binary variable parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(), "H", 2)); //We fix the size of the window parameterLearningAlgorithm.getSVBEngine().setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
// Build a Naive Bayes DAG from the attributes of the Flink data set and print it;
// "DiscreteVar4" presumably names the class variable — confirm against DAGGenerator's javadoc.
DAG dag = DAGGenerator.getNaiveBayesStructure(dataFlink.getAttributes(), "DiscreteVar4"); System.out.println(dag.toString());
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a SVB object SVBFading parameterLearningAlgorithm = new SVBFading(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(DAGGenerator.getHiddenNaiveBayesStructure(data.getAttributes(),"GlobalHidden", 2)); //We fix the fading or forgeting factor parameterLearningAlgorithm.setFadingFactor(0.9); //We fix the size of the window parameterLearningAlgorithm.setWindowsSize(100); //We can activate the output parameterLearningAlgorithm.setOutput(true); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
// Build a Naive Bayes DAG from the attributes of the Flink data set and print it;
// "DiscreteVar4" presumably names the class variable — confirm against DAGGenerator's javadoc.
DAG dag = DAGGenerator.getNaiveBayesStructure(dataFlink.getAttributes(), "DiscreteVar4"); System.out.println(dag.toString());
// Build a Naive Bayes DAG from the attributes of the Flink data set and print it;
// "DiscreteVar4" presumably names the class variable — confirm against DAGGenerator's javadoc.
DAG dag = DAGGenerator.getNaiveBayesStructure(dataFlink.getAttributes(), "DiscreteVar4"); System.out.println(dag.toString());
// Build a Naive Bayes DAG from the attributes of the Flink data set and print it;
// "DiscreteVar4" presumably names the class variable — confirm against DAGGenerator's javadoc.
DAG dag = DAGGenerator.getNaiveBayesStructure(dataFlink.getAttributes(), "DiscreteVar4"); System.out.println(dag.toString());