/**
 * Builds a sampler of dynamic MAP instances over the given Bayesian network.
 *
 * @param bn         the Bayesian network to sample from.
 * @param attributes the attributes describing the generated data instances.
 * @param hiddenVars per-variable flags marking which variables are hidden.
 * @param marVars    per-variable missing-at-random rates.
 * @param seed       seed for the random number generator.
 */
public MAPDynamicInstancesSampler(BayesianNetwork bn, Attributes attributes, Map<Variable,Boolean> hiddenVars, Map<Variable,Double> marVars, int seed) {
    this.bn = bn;
    this.attributes = attributes;
    this.hiddenVars = hiddenVars;
    this.marVars = marVars;
    this.seed = seed;
    // Cache an ancestral (topological) ordering of the variables; sampling
    // must visit parents before children.
    this.causalOrder = Utils.getTopologicalOrder(this.bn.getDAG());
}
public static void main(String[] args) throws Exception { // load the true Bayesian network BayesianNetwork originalBnet = BayesianNetworkLoader.loadFromFile(args[0]); System.out.println("\n Network \n " + args[0]); System.out.println("\n Number of variables \n " + originalBnet.getDAG().getVariables().getNumberOfVars()); //Sampling from the input BN BayesianNetworkSampler sampler = new BayesianNetworkSampler(originalBnet); sampler.setSeed(0); // Defines the size of the data to be generated from the input BN int sizeData = Integer.parseInt(args[1]); System.out.println("\n Sampling and saving the data... \n "); DataStream<DataInstance> data = sampler.sampleToDataStream(sizeData); DataStreamWriter.writeDataToFile(data, "./data.arff"); }
public static void main(String[] args) { DynamicBayesianNetworkGenerator.setNumberOfContinuousVars(0); DynamicBayesianNetworkGenerator.setNumberOfDiscreteVars(5); DynamicBayesianNetworkGenerator.setNumberOfStates(2); DynamicBayesianNetworkGenerator.setNumberOfLinks(5); DynamicBayesianNetwork dynamicNaiveBayes = DynamicBayesianNetworkGenerator.generateDynamicNaiveBayes(new Random(0), 2, true); System.out.println("ORIGINAL DYNAMIC DAG:"); System.out.println(dynamicNaiveBayes.getDynamicDAG().toString()); //System.out.println(dynamicNaiveBayes.toString()); System.out.println(); //dynamicNaiveBayes.getDynamicVariables().getListOfDynamicVariables().forEach(var -> System.out.println(var.getName())); //dynamicNaiveBayes.getDynamicVariables().getListOfDynamicVariables().forEach(var -> System.out.println(var.getName())); BayesianNetwork bn = DynamicToStaticBNConverter.convertDBNtoBN(dynamicNaiveBayes,4); System.out.println("NEW STATIC DAG:"); System.out.println(); System.out.println(bn.getDAG().toString()); System.out.println(); System.out.println("ORIGINAL DYNAMIC BN:"); System.out.println(dynamicNaiveBayes.toString()); System.out.println("STATIC BN:"); System.out.println(bn.toString()); } }
/**
 * Demo: reads {@code ./data.arff} through Flink, builds a hidden-variable
 * naive Bayes structure over its attributes and saves the resulting (untrained)
 * network to {@code ./BNHiddenExample.bn}.
 *
 * @param args unused.
 * @throws Exception if loading the data or writing the network fails.
 */
public static void main(String[] args) throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // Load the previously sampled data set.
    DataFlink<DataInstance> dataFlink = DataFlinkLoader.loadDataFromFile(env, "./data.arff", false);

    // Build the naive Bayes DAG with a hidden root variable.
    DAG dag = SetBNwithHidden.getHiddenNaiveBayesStructure(dataFlink);
    BayesianNetwork bnet = new BayesianNetwork(dag);

    System.out.println("\n Number of variables \n " + bnet.getDAG().getVariables().getNumberOfVars());
    System.out.println(dag.toString());

    BayesianNetworkWriter.save(bnet, "./BNHiddenExample.bn");
}
// Fit parameters by parallel maximum likelihood on the Flink data set,
// reusing the structure of the original network (structure is not learned here).
parallelMaximumLikelihood.setDAG(originalBnet.getDAG());
parallelMaximumLikelihood.initLearning();
parallelMaximumLikelihood.updateModel(dataFlink);
// Fit parameters by parallel maximum likelihood on the in-memory data set,
// reusing the structure of the original network (structure is not learned here).
parallelMaximumLikelihood.setDAG(originalBnet.getDAG());
parallelMaximumLikelihood.initLearning();
parallelMaximumLikelihood.updateModel(data);
/**
 * Sets the Hugin model structure from the AMIDST DAG: for every parent→child
 * arc in the AMIDST network, adds the corresponding arc between the Hugin
 * nodes of the same names.
 *
 * @param amidstBN the Bayesian network model in AMIDST format.
 * @throws ExceptionHugin if a node lookup or arc insertion fails on the Hugin side.
 */
private void setStructure (BayesianNetwork amidstBN) throws ExceptionHugin {
    DAG dag = amidstBN.getDAG();
    for (Variable amidstChild: amidstBN.getVariables()) {
        // The child node is invariant across the inner loop: look it up once
        // instead of once per parent (was previously inside the inner loop).
        Node huginChild = this.huginBN.getNodeByName(amidstChild.getName());
        for (Variable amidstParent: dag.getParentSet(amidstChild)) {
            Node huginParent = this.huginBN.getNodeByName(amidstParent.getName());
            huginChild.addParent(huginParent);
        }
    }
}
// Fit parameters with parallel variational Bayes on the Flink data set,
// reusing the structure of the original network.
parallelVB.setDAG(originalBnet.getDAG());
parallelVB.initLearning();
parallelVB.updateModel(dataFlink);
// Report the number of variables in the network and dump its structure.
System.out.println("\n Number of variables \n " + bn.getDAG().getVariables().getNumberOfVars());
System.out.println(dag.toString());
// Ancestral ordering of the variables: every parent precedes its children.
List<Variable> topologicalOrder = Utils.getTopologicalOrder(bn.getDAG());
// Tighten the VMP convergence threshold, then wire the SVB learner
// to the structure of the single-Normal-variable network.
vmp.setThreshold(0.0001);
svb.setDAG(oneNormalVarBN.getDAG());
// Ancestral ordering of the variables: every parent precedes its children.
List<Variable> topologicalOrder = Utils.getTopologicalOrder(bn.getDAG());
// Configure streaming variational Bayes on the Asia network structure,
// updating on windows of 100 instances.
svb.setDAG(asianet.getDAG());
svb.setWindowsSize(100);
svb.initLearning();
// NOTE(review): this line concatenates several fragments from DIFFERENT scopes
// (locals such as staticVar_current / parents / conDist_dynamic are re-declared
// below) — it cannot compile as a single block. Presumably each fragment maps a
// dynamic-model variable at one time slice ("_t0", "_t1", "_t2" name suffixes)
// onto its static counterpart and copies its conditional distribution — TODO
// confirm against the original enclosing methods.

// Fragment: slice t1 conditioned on the t0 interface copy of the grouped class.
Variable staticVar_current = variables.getVariableByName(groupedClassName + "_t1");
Variable staticVar_interface = variables.getVariableByName(groupedClassName + "_t0");
List<Variable> parents = bn.getDAG().getParentSet(staticVar_current).getParents();
ConditionalDistribution conDist_dynamic = Serialization.deepCopy(conDistT);

// Fragment: slice t2 conditioned on the t1 interface copy.
Variable staticVar_current = variables.getVariableByName(groupedClassName + "_t2");
Variable staticVar_interface = variables.getVariableByName(groupedClassName + "_t1");
List<Variable> parents = bn.getDAG().getParentSet(staticVar_current).getParents();
ConditionalDistribution conDist_dynamic = Serialization.deepCopy(conDistT);

// Fragment: build a multinomial distribution over the static variable's parents.
generalConditionalDistTimeT = new Multinomial_MultinomialParents(staticVar, bn.getDAG().getParentSet(staticVar).getParents());

// Fragment: group the time-T distribution of the MAP variable for slice t1.
Variable staticVar0 = variables.getVariableByName(groupedClassName + "_t1");
Variable staticVar0_interface = variables.getVariableByName(groupedClassName + "_t0");
List<Variable> parents = bn.getDAG().getParentSet(staticVar0).getParents();
ConditionalDistribution conDist_dynamic = Serialization.deepCopy(model.getConditionalDistributionsTimeT().get(dynVar.getVarID()));
ConditionalDistribution conditionalDistTime1 = groupedDistributionMAPVariableTimeT(dynVar, staticVar0, staticVar0_interface.getNumberOfStates(), parents, conDist_dynamic, modelNumber);

// Fragments: parent sets of further static slice variables.
List<Variable> parents1 = bn.getDAG().getParentSet(staticVar1).getParents();
List<Variable> parentList = bn.getDAG().getParentSet(staticVar2).getParents();
public static void baseTest(ExecutionEnvironment env, DataStream<DataInstance> data, BayesianNetwork network, int batchSize, double error) throws IOException, ClassNotFoundException { DataStreamWriter.writeDataToFile(data, "./datasets/simulated/tmp.arff"); DataFlink<DataInstance> dataFlink = DataFlinkLoader.loadDataFromFile(env, "./datasets/simulated/tmp.arff", false); network.getDAG().getVariables().setAttributes(dataFlink.getAttributes()); //Structure learning is excluded from the test, i.e., we use directly the initial Asia network structure // and just learn then test the parameter learning //Parameter Learning dVMP parallelVB = new dVMP(); parallelVB.setOutput(true); parallelVB.setMaximumGlobalIterations(10); parallelVB.setSeed(5); parallelVB.setBatchSize(batchSize); parallelVB.setLocalThreshold(0.001); parallelVB.setGlobalThreshold(0.01); parallelVB.setMaximumLocalIterations(100); parallelVB.setMaximumGlobalIterations(100); parallelVB.setDAG(network.getDAG()); parallelVB.initLearning(); parallelVB.updateModel(dataFlink); BayesianNetwork bnet = parallelVB.getLearntBayesianNetwork(); //Check if the probability distributions of each node for (Variable var : network.getVariables()) { if (Main.VERBOSE) System.out.println("\n------ Variable " + var.getName() + " ------"); if (Main.VERBOSE) System.out.println("\nTrue distribution:\n" + network.getConditionalDistribution(var)); if (Main.VERBOSE) System.out.println("\nLearned distribution:\n" + bnet.getConditionalDistribution(var)); Assert.assertTrue(bnet.getConditionalDistribution(var).equalDist(network.getConditionalDistribution(var), error)); } //Or check directly if the true and learned networks are equals Assert.assertTrue(bnet.equalBNs(network, error)); }
/**
 * Sets the distribution of a normal variable with multinomial parents in the
 * AMIDST model from the corresponding distribution in the Hugin model.
 * One univariate normal is set for each assignment of the multinomial parents.
 *
 * @param huginVar the Hugin variable with the distribution to be converted.
 * @throws ExceptionHugin if the Hugin-side operations fail.
 */
private void setNormal_MultinomialParents(Node huginVar) throws ExceptionHugin {
    // AMIDST variables are aligned with Hugin nodes by index.
    int nodeIndex = this.huginBN.getNodes().indexOf(huginVar);
    Variable amidstVar = this.amidstBN.getVariables().getVariableById(nodeIndex);

    List<Variable> parentVars = this.amidstBN.getDAG().getParentSet(amidstVar).getParents();
    int parentAssignments = MultinomialIndex.getNumberOfPossibleAssignments(parentVars);

    Normal_MultinomialParents dist = this.amidstBN.getConditionalDistribution(amidstVar);
    // Convert one univariate normal per parent configuration.
    for (int assignment = 0; assignment < parentAssignments; assignment++) {
        this.setNormal(huginVar, dist.getNormal(assignment), assignment);
    }
}
// NOTE(review): these look like disjoint fragments from a Hugin structural-
// learning routine (a lookup, an unmatched `if` header, and the method's tail)
// — they do not form one compilable block; verify against the enclosing method.

// Pair up the i-th AMIDST variable with the i-th Hugin node.
Variable var = bn.getDAG().getVariables().getVariableById(i);
Node n = nodeList.get(i);
// Only handle discrete Hugin nodes here (body not visible in this chunk).
if (n.getKind().compareTo(NetworkModel.H_KIND_DISCRETE) == 0) {

// Tail of the routine: report elapsed time, convert the learned Hugin network
// back to an AMIDST DAG and attach the data attributes before returning it.
System.out.println("Structural Learning in Hugin: " + watch.stop());
DAG dagLearned = (BNConverterToAMIDST.convertToAmidst(huginNetwork)).getDAG();
dagLearned.getVariables().setAttributes(dataStream.getAttributes());
return dagLearned;
/**
 * Sets the distribution of a multinomial variable with multinomial parents in
 * the AMIDST model from the corresponding distribution in the Hugin model.
 * Hugin stores the CPT as a flat row-major array; each block of
 * {@code numStates} entries is one parent configuration.
 *
 * @param huginVar the Hugin variable with the distribution to be converted.
 * @throws ExceptionHugin if reading the Hugin table fails.
 */
private void setMultinomial_MultinomialParents(Node huginVar) throws ExceptionHugin {
    // AMIDST variables are aligned with Hugin nodes by index.
    int indexNode = this.huginBN.getNodes().indexOf(huginVar);
    Variable amidstVar = this.amidstBN.getVariables().getVariableById(indexNode);
    int numStates = amidstVar.getNumberOfStates();
    double[] huginProbabilities = huginVar.getTable().getData();

    List<Variable> parents = this.amidstBN.getDAG().getParentSet(amidstVar).getParents();
    int numParentAssignments = MultinomialIndex.getNumberOfPossibleAssignments(parents);

    // The target distribution is loop-invariant: fetch it once instead of on
    // every parent-assignment iteration (as the original code did).
    Multinomial_MultinomialParents dist = this.amidstBN.getConditionalDistribution(amidstVar);
    for (int i = 0; i < numParentAssignments; i++) {
        double[] amidstProbabilities = new double[numStates];
        // Copy the i-th block of the flat Hugin CPT.
        System.arraycopy(huginProbabilities, i * numStates, amidstProbabilities, 0, numStates);
        dist.getMultinomial(i).setProbabilities(amidstProbabilities);
    }
}