/**
 * Builds a KDE approximation of the posterior distribution over {@code vertex}
 * by drawing Metropolis-Hastings samples from its connected graph and
 * delegating to the sample-based overload of {@code approximate}.
 *
 * @param vertex   the vertex whose posterior is to be approximated
 * @param nSamples number of posterior samples to draw
 * @return a KDEVertex fitted to the drawn samples
 */
public static KDEVertex approximate(DoubleVertex vertex, Integer nSamples) {
    KeanuProbabilisticModel probabilisticModel = new KeanuProbabilisticModel(vertex.getConnectedGraph());
    DoubleVertexSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(probabilisticModel)
        .getPosteriorSamples(probabilisticModel, ImmutableList.of(vertex), nSamples)
        .getDoubleTensorSamples(vertex);
    return approximate(posteriorSamples);
}
@Test
public void canCalculateLogLikelihoodOnKeanuProbabilisticModelWithGradient() {
    // The gradient-capable model wrapper must support log-likelihood just like the plain one.
    ProbabilisticModel model = new KeanuProbabilisticModelWithGradient(D.getConnectedGraph());
    canCalculateLogLikelihood(model);
}
@Test
public void canCalculateLogLikelihoodOnKeanuProbabilisticModel() {
    // Exercises the shared log-likelihood check against the plain (non-gradient) model.
    ProbabilisticModel model = new KeanuProbabilisticModel(D.getConnectedGraph());
    canCalculateLogLikelihood(model);
}
/**
 * Computes a MAP estimate of the network rooted at field {@code A} using the
 * gradient optimizer, and captures the resulting latent values as a NetworkState.
 */
private NetworkState findMAPWithOptimizer() {
    BayesianNetwork bayesNet = new BayesianNetwork(A.getConnectedGraph());
    // Probing avoids starting the optimizer from a zero-probability configuration.
    bayesNet.probeForNonZeroProbability(100, random);
    GradientOptimizer optimizer = KeanuOptimizer.Gradient.of(bayesNet);
    optimizer.maxAPosteriori();
    return new SimpleNetworkState(
        bayesNet.getLatentVertices().stream()
            .collect(Collectors.toMap(Vertex::getId, Vertex::getValue)));
}
}
@Test(expected = IllegalArgumentException.class)
public void networkWithNonSaveableVerticesThrowsExceptionOnSave() throws IOException {
    // Saving a network that contains a non-saveable vertex must be rejected.
    DoubleVertex nonSaveable = new TestNonSaveableVertex();
    BayesianNetwork network = new BayesianNetwork(nonSaveable.getConnectedGraph());
    NetworkSaver saver = mock(NetworkSaver.class);
    network.save(saver);
}
@Test
public void canCalculateLogProbOnKeanuProbabilisticModel() {
    // Exercises the shared log-prob check against the plain (non-gradient) model.
    ProbabilisticModel model = new KeanuProbabilisticModel(D.getConnectedGraph());
    canCalculateLogProb(model);
}
@Test
public void canCalculateLogProbOnKeanuProbabilisticModelWithGradient() {
    // The gradient-capable model wrapper must support log-prob just like the plain one.
    ProbabilisticModel model = new KeanuProbabilisticModelWithGradient(D.getConnectedGraph());
    canCalculateLogProb(model);
}
@Before
public void setup() {
    // Compose the inner model into an outer network: the outer UniformVertex
    // feeds the inner net's "Location" proxy, and "Output1" is exposed back out.
    innerNet = createInnerNet();
    trueLocation = new UniformVertex(0.1, 50.0);
    Map<VertexLabel, Vertex> inputs = ImmutableMap.of(new VertexLabel("Location"), trueLocation);
    Map<VertexLabel, Vertex> composedOutputs = ModelComposition.composeModel(
        innerNet,
        inputs,
        ImmutableList.of(new VertexLabel("Output1"))
    );
    gaussOutputVertex = (DoubleVertex) composedOutputs.get(new VertexLabel("Output1"));
    outerNet = new BayesianNetwork(gaussOutputVertex.getConnectedGraph());
}
/**
 * Runs MAP inference over the graph connected to {@code unknownVertex},
 * leaving the optimized values set on the vertices as a side effect.
 */
private static void doInferenceOn(DoubleVertex unknownVertex, KeanuRandom random) {
    BayesianNetwork network = new BayesianNetwork(unknownVertex.getConnectedGraph());
    // Probing avoids starting the optimizer from a zero-probability configuration.
    network.probeForNonZeroProbability(100, random);
    KeanuOptimizer.Gradient.of(network).maxAPosteriori();
}
@Test(expected = IllegalArgumentException.class)
public void nonSaveableVertexThrowsExceptionOnSave() {
    // Saving a single non-saveable vertex directly through a ProtobufSaver must be rejected.
    DoubleVertex nonSaveable = new TestNonSaveableVertex();
    BayesianNetwork network = new BayesianNetwork(nonSaveable.getConnectedGraph());
    ProtobufSaver saver = new ProtobufSaver(network);
    nonSaveable.save(saver);
}
@BeforeClass
public static void setup() throws IOException {
    // Reset the ID generator so serialized vertex IDs are deterministic across runs.
    VertexId.ID_GENERATOR.set(0);
    DoubleVertex mu = new ConstantDoubleVertex(0);
    DoubleVertex sigma = new ConstantDoubleVertex(new double[]{3.0, 4.0});
    DoubleVertex gaussian = new GaussianVertex(mu, sigma);
    gaussian.observe(DoubleTensor.ones(2));
    gaussian.setLabel("GaussianVertex");
    net = new BayesianNetwork(gaussian.getConnectedGraph());
    someMetadata = ImmutableMap.of(
        "Author", "Some Author",
        "Tag", "MyBayesNet"
    );
    // Serialize once up front; tests then inspect the captured output stream.
    JsonSaver saver = new JsonSaver(net);
    saver.save(outputStream, true, someMetadata);
}
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsProbabilisticDouble() {
    // Sample a posterior for a Gaussian latent, then compare KL divergence against
    // two candidate Q distributions: the one located further away must score higher.
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex offset = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(offset);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(latent), 1000);
    ProbabilisticDouble nearQ = new GaussianVertex(0.1, 1.);
    ProbabilisticDouble farQ = new GaussianVertex(10.0, 1.);
    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsQDistribution() {
    // Same check as the ProbabilisticDouble variant, but Q is supplied
    // as a QDistribution implementation instead.
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex offset = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(offset);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(latent), 1000);
    QDistribution nearQ = new TestGaussianQDistribution(0.1, 1.);
    QDistribution farQ = new TestGaussianQDistribution(10.0, 1.);
    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
@Test(expected = IllegalArgumentException.class)
public void willRejectProxyWithParent() {
    // A proxy vertex that already has a parent cannot be re-bound during
    // model composition, so composeModel must throw.
    DoubleProxyVertex proxy = new DoubleProxyVertex(new VertexLabel("Input1"));
    DoubleVertex output = new GaussianVertex(proxy, 1.0);
    output.setLabel("Output1");
    DoubleVertex outerInput = new UniformVertex(-1.0, 1.0);
    DoubleVertex preboundParent = new ConstantDoubleVertex(0.0);
    proxy.setParent(preboundParent);
    BayesianNetwork network = new BayesianNetwork(output.getConnectedGraph());
    ModelComposition.composeModel(
        network,
        ImmutableMap.of(new VertexLabel("Input1"), outerInput),
        ImmutableList.of(new VertexLabel("Output1")));
}
/**
 * Runs MAP inference on the graph connected to {@code temperature} with a
 * gradient optimizer and returns the optimized scalar value.
 *
 * NOTE: the %%SNIPPET%% markers delimit code that is extracted into published
 * documentation — do not rename or reformat the enclosed statements casually.
 *
 * @param temperature the latent vertex to optimize
 * @return the most probable temperature found by the optimizer
 */
private static double runGradientOptimizer(DoubleVertex temperature) {
    //%%SNIPPET_START%% GradientOptimizerMostProbable
    GradientOptimizer optimizer = KeanuOptimizer.Gradient.builderFor(temperature.getConnectedGraph())
        .maxEvaluations(5000)
        .relativeThreshold(1e-8)
        .absoluteThreshold(1e-8)
        .build();
    optimizer.maxAPosteriori();
    double calculatedTemperature = temperature.getValue().scalar();
    //%%SNIPPET_END%% GradientOptimizerMostProbable
    return calculatedTemperature;
}
@Test
public void doesRejectOnImpossibleProposal() {
    // Proposing -1 for a Uniform(0, 1) vertex has zero probability, so even an
    // always-accept step function must reject and leave the value untouched.
    DoubleVertex A = new UniformVertex(0, 1);
    A.setValue(0.5);
    ProbabilisticModel model = new KeanuProbabilisticModel(A.getConnectedGraph());
    MetropolisHastingsStep step = stepFunctionWithConstantProposal(model, -1, alwaysAccept);
    MetropolisHastingsStep.StepResult stepResult = step.step(
        Collections.singleton(A),
        model.logProb()
    );
    assertFalse(stepResult.isAccepted());
    assertEquals(0.5, A.getValue(0), 1e-10);
}
@Test
public void throwsExceptionIfNetworkStateHasMoreThanOneVertexAndQIsProbabilisticDouble() {
    // Sampling two variables produces NetworkStates with two entries, which a
    // ProbabilisticDouble Q cannot score — compute must throw with a helpful message.
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex offset = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(offset);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Arrays.asList(latent, shifted), 1000);
    ProbabilisticDouble q = new GaussianVertex(0.1, 1.);
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("A NetworkState does not contain exactly 1 variable and ProbabilisticDouble can only compute the log probability of one value. Try computing KL divergence against a QDistribution instead.");
    KLDivergence.compute(q, samples);
}
/**
 * Builds the inner network used by composition tests: a "Location" proxy and a
 * "Size" uniform feed both a labelled Gaussian ("Output1") and a labelled
 * Pareto ("Output2"). Each statement assigns a test-class field consumed by
 * other tests, so construction order is significant.
 */
private BayesianNetwork createInnerNet() { location = new DoubleProxyVertex(new VertexLabel("Location")); size = new UniformVertex(0.1, 20); size.setLabel("Size"); gaussian = new GaussianVertex(location, size); gaussian.setLabel("Output1"); pareto = new ParetoVertex(location, size); pareto.setLabel("Output2"); return new BayesianNetwork(gaussian.getConnectedGraph()); }
/**
 * Verifies that the gradient optimizer can differentiate through a matrix
 * determinant op: a 2x2 Gaussian input feeds a determinant whose Gaussian
 * output is observed near 2.2, so after maxLikelihood the input's determinant
 * should land close to 2.2 regardless of the starting mu.
 *
 * @param inputGaussianMu the prior mean used to initialize the 2x2 input
 */
private void assertOptimizerWorksWithDeterminant(double inputGaussianMu) {
    final long[] shape = new long[]{2, 2};
    final DoubleVertex input = new GaussianVertex(shape, inputGaussianMu, 5);
    final DoubleVertex determinant = input.matrixDeterminant();
    final DoubleVertex output = new GaussianVertex(determinant, 1);
    output.observe(new double[]{2.0, 2.4});
    final BayesianNetwork net = new BayesianNetwork(output.getConnectedGraph());
    KeanuOptimizer.of(net).maxLikelihood();
    // Fixed: JUnit's assertEquals takes (expected, actual, delta) — the original
    // passed the actual value first, which produces misleading failure messages.
    assertEquals(2.2, input.getValue().determinant(), 0.1);
}
@Test
public void findsMaxAposterioriWithAnnealing() {
    // Simulated annealing and the gradient optimizer should agree (within
    // tolerance) on the MAP values of both latents A and B.
    BayesianNetwork bayesNet = new BayesianNetwork(A.getConnectedGraph());
    bayesNet.probeForNonZeroProbability(100, random);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(bayesNet);
    NetworkState annealedMap =
        Keanu.Sampling.SimulatedAnnealing.withDefaultConfigFor(model, random).getMaxAPosteriori(model, 10000);
    NetworkState optimizerMap = findMAPWithOptimizer();
    assertEquals(optimizerMap.get(A).scalar(), annealedMap.get(A).scalar(), 0.05);
    assertEquals(optimizerMap.get(B).scalar(), annealedMap.get(B).scalar(), 0.05);
}