/**
 * Constructor of a classifier which is initialized with the default arguments:
 * the last variable in attributes is the class variable and importance sampling
 * is the inference algorithm for making the predictions.
 *
 * <p>NOTE(review): the code below only configures {@link ParallelMaximumLikelihood}
 * as the learning algorithm; the inference-algorithm default mentioned above is
 * presumably set by the superclass — confirm against {@code super(attributes)}.
 *
 * @param attributes list of attributes of the classifier (i.e. its variables)
 * @throws WrongConfigurationException is thrown when the attributes passed are not suitable
 *                                     for such classifier
 */
public AODE(Attributes attributes) throws WrongConfigurationException {
    super(attributes);
    // Parameters are estimated by (parallel) maximum likelihood.
    this.setLearningAlgorithm(new ParallelMaximumLikelihood());
    // Start with an empty list; presumably filled with the candidate class
    // variables of the AODE ensemble later on — TODO confirm against callers.
    classVariables = new ArrayList<>();
}
/**
 * Constructor of a classifier which is initialized with the default arguments:
 * the last variable in attributes is the class variable and importance sampling
 * is the inference algorithm for making the predictions.
 *
 * <p>NOTE(review): the code below only configures {@link ParallelMaximumLikelihood}
 * as the learning algorithm; the inference-algorithm default mentioned above is
 * presumably set by the superclass — confirm against {@code super(attributes)}.
 *
 * @param attributes list of attributes of the classifier (i.e. its variables)
 * @throws WrongConfigurationException is thrown when the attributes passed are not suitable
 *                                     for such classifier
 */
public HODE(Attributes attributes) throws WrongConfigurationException {
    super(attributes);
    // Parameters are estimated by (parallel) maximum likelihood.
    this.setLearningAlgorithm(new ParallelMaximumLikelihood());
}
public static void compareNumberOfCores() throws IOException { System.out.println("Comparison with different number of cores"); System.out.println("-----------------------------------------"); createBayesianNetwork(); if(isSampleData()) sampleBayesianNetwork(); DataStream<DataInstance> data = DataStreamLoader.open(getPathToFile()); ParallelMaximumLikelihood parameterLearningAlgorithm = new ParallelMaximumLikelihood(); parameterLearningAlgorithm.setParallelMode(isParallel()); parameterLearningAlgorithm.setDAG(dag); parameterLearningAlgorithm.setDataStream(data); parameterLearningAlgorithm.setWindowsSize(getBatchSize()); System.out.println("Available number of processors: " + Runtime.getRuntime().availableProcessors()); //We discard the first five experiments and then record the following 10 repetitions double average = 0.0; for (int j = 0; j <15; j++) { long start = System.nanoTime(); parameterLearningAlgorithm.runLearning(); long duration = (System.nanoTime() - start) / 1; double seconds = duration / 1000000000.0; System.out.println("Iteration ["+j+"] = "+seconds + " secs"); if(j>4){ average+=seconds; } data.restart(); } System.out.println("Average = "+average/10.0 + " secs"); }
public static void compareBatchSizes() throws IOException { System.out.println("Batch size comparisons"); System.out.println("----------------------"); createBayesianNetwork(); if(isSampleData()) sampleBayesianNetwork(); DataStream<DataInstance> data = DataStreamLoader.open(getPathToFile()); ParallelMaximumLikelihood parameterLearningAlgorithm = new ParallelMaximumLikelihood(); parameterLearningAlgorithm.setParallelMode(isParallel()); parameterLearningAlgorithm.setDAG(dag); parameterLearningAlgorithm.setDataStream(data); //We discard the first five experiments and then record the following 10 repetitions for (int i = 0; i < batchSizes.length; i++) { long average = 0L; for (int j = 0; j < 5; j++) { parameterLearningAlgorithm.setWindowsSize(batchSizes[i]); long start = System.nanoTime(); parameterLearningAlgorithm.runLearning(); long duration = (System.nanoTime() - start) / 1; double seconds = duration / 1000000000.0; //System.out.println("Iteration ["+j+"] = "+seconds + " secs"); if (j > 4) { average += seconds; } } System.out.println(batchSizes[i]+"\t"+average/10.0 + " secs"); } }
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParameterLearningAlgorithm object with the MaximumLikehood builder ParameterLearningAlgorithm parameterLearningAlgorithm = new ParallelMaximumLikelihood(); //We fix the DAG structure parameterLearningAlgorithm.setDAG(getNaiveBayesStructure(data,0)); //We should invoke this method before processing any data parameterLearningAlgorithm.initLearning(); //Then we show how we can perform parameter learnig by a sequential updating of data batches. for (DataOnMemory<DataInstance> batch : data.iterableOverBatches(100)){ parameterLearningAlgorithm.updateModel(batch); } //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }
public static void main(String[] args) throws Exception { //We can open the data stream using the static class DataStreamLoader DataStream<DataInstance> data = DataStreamLoader.open("datasets/simulated/WasteIncineratorSample.arff"); //We create a ParallelMaximumLikelihood object with the MaximumLikehood builder ParallelMaximumLikelihood parameterLearningAlgorithm = new ParallelMaximumLikelihood(); //We activate the parallel mode. parameterLearningAlgorithm.setParallelMode(true); //We desactivate the debug mode. parameterLearningAlgorithm.setDebug(false); //We fix the DAG structure parameterLearningAlgorithm.setDAG(MaximimumLikelihoodByBatchExample.getNaiveBayesStructure(data, 0)); //We set the batch size which will be employed to learn the model in parallel parameterLearningAlgorithm.setWindowsSize(100); //We set the data which is going to be used for leaning the parameters parameterLearningAlgorithm.setDataStream(data); //We perform the learning parameterLearningAlgorithm.runLearning(); //And we get the model BayesianNetwork bnModel = parameterLearningAlgorithm.getLearntBayesianNetwork(); //We print the model System.out.println(bnModel.toString()); }