/**
 * Estimates the probability that the given boolean vertex is true by Monte Carlo sampling.
 *
 * @param vertex      the boolean vertex whose probability is estimated
 * @param sampleCount how many posterior samples to draw
 * @param random      source of randomness for the sampler
 * @return the fraction of samples in which the vertex was true
 */
public static double priorProbabilityTrue(Vertex<? extends Tensor<Boolean>> vertex, int sampleCount, KeanuRandom random) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());
    MetropolisHastings sampler = MetropolisHastings.withDefaultConfigFor(model, random);
    long numberTrue = sampler
        .generatePosteriorSamples(model, Collections.singletonList(vertex))
        .stream()
        .limit(sampleCount)
        .filter(state -> state.get(vertex).scalar())
        .count();
    return (double) numberTrue / sampleCount;
}
@Test
public void youCanCreateADefaultMetropolisHastingsSampler() {
    // Smoke test: constructing a default sampler from a mocked model must not throw.
    KeanuProbabilisticModel mockedModel = mock(KeanuProbabilisticModel.class);
    MetropolisHastings sampler = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(mockedModel);
}
/**
 * Builds a kernel-density approximation of the posterior distribution over {@code vertex}.
 *
 * @param vertex   the vertex whose posterior is to be approximated
 * @param nSamples how many Metropolis-Hastings samples to draw
 * @return a {@link KDEVertex} fitted to the sampled values
 */
public static KDEVertex approximate(DoubleVertex vertex, Integer nSamples) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());
    DoubleVertexSamples samples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, ImmutableList.of(vertex), nSamples)
        .getDoubleTensorSamples(vertex);
    return approximate(samples);
}
/** Documentation example: computing the autocorrelation of a scalar posterior sample stream. */
private static void scalarAutocorrelationExample() {
    // Two independent Gaussian priors whose noisy sum is observed.
    DoubleVertex A = new GaussianVertex(20.0, 1.0);
    DoubleVertex B = new GaussianVertex(20.0, 1.0);
    DoubleVertex C = new GaussianVertex(A.plus(B), 1.0);
    C.observe(43.0);
    // Start the chain from the prior means.
    A.setValue(20.0);
    B.setValue(20.0);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(C.getConnectedGraph());
    //%%SNIPPET_START%% ScalarAutocorrelation
    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples(
        model,
        model.getLatentVariables(),
        100
    );
    DoubleTensor autocorrelation = posteriorSamples.getDoubleTensorSamples(A).getAutocorrelation();
    //%%SNIPPET_END%% ScalarAutocorrelation
}
@Test
public void samplesFromPriorWithObservedDeterministic() {
    // Observing (A OR B) == false forces both A and B to be false in the posterior.
    BernoulliVertex A = new BernoulliVertex(0.5);
    BernoulliVertex B = new BernoulliVertex(0.5);
    BooleanVertex C = A.or(B);
    C.observe(false);

    BayesianNetwork net = new BayesianNetwork(A.getConnectedGraph());
    net.probeForNonZeroProbability(100);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(net);
    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(A), 10000);

    double probabilityATrue = posteriorSamples.get(A).probability(sample -> sample.scalar());
    assertEquals(0.0, probabilityATrue, 0.01);
}
@Test
public void doesNotStoreSamplesThatWillBeDropped() {
    final int totalSamples = 1000;
    final int samplesToDrop = 100;
    final int interval = 2;

    GaussianVertex A = new GaussianVertex(0, 1);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(A.getConnectedGraph());

    NetworkSamples samples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model)
        .generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(samplesToDrop)
        .downSampleInterval(interval)
        .generate(totalSamples);

    // Only the kept (post-drop, down-sampled) states should be retained.
    assertEquals((totalSamples - samplesToDrop) / interval, samples.size());
    assertEquals(0.0, samples.getDoubleTensorSamples(A).getAverages().scalar(), 0.1);
}
@Test
public void samplingWithAssertThatShouldntFire() {
    UniformVertex temperature = new UniformVertex(20., 30.);
    // Graph assertions over the full support of the prior: they must never trip while sampling.
    temperature.lessThan(new ConstantDoubleVertex(30)).assertTrue();
    temperature.greaterThan(new ConstantDoubleVertex(20)).assertTrue();

    GaussianVertex thermometerOne = new GaussianVertex(temperature, 2.5);
    GaussianVertex thermometerTwo = new GaussianVertex(temperature, 5.);
    thermometerOne.observe(25.);
    thermometerTwo.observe(30.);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(temperature.getConnectedGraph());
    MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVariables(), 100);
}
/**
 * Creates a Metropolis-Hastings sampler for the given model using the shared
 * default source of randomness.
 *
 * @param model the probabilistic model to sample from
 * @return a sampler configured with {@code KeanuRandom.getDefaultRandom()}
 */
public static io.improbable.keanu.algorithms.mcmc.MetropolisHastings withDefaultConfigFor(KeanuProbabilisticModel model) {
    return withDefaultConfigFor(model, KeanuRandom.getDefaultRandom());
}
@Test
public void canStreamSamples() {
    final int sampleCount = 1000;
    final int dropCount = 100;
    final int downSampleInterval = 1;

    GaussianVertex A = new GaussianVertex(0, 1);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(A.getConnectedGraph());
    MetropolisHastings sampler = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model);

    // Stream the chain lazily and average the sampled values of A.
    double meanOfA = sampler.generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(dropCount)
        .downSampleInterval(downSampleInterval)
        .stream()
        .limit(sampleCount)
        .mapToDouble(state -> state.get(A).scalar())
        .average()
        .getAsDouble();

    assertEquals(0.0, meanOfA, 0.1);
}
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsQDistribution() {
    GaussianVertex v1 = new GaussianVertex(0., 1.);
    ConstantDoubleVertex v2 = new ConstantDoubleVertex(0.1);
    DoubleVertex v3 = v1.plus(v2);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(v3.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(v1), 1000);

    // q1 is centred near the posterior; q2 is far away, so its divergence must be larger.
    QDistribution nearQ = new TestGaussianQDistribution(0.1, 1.);
    QDistribution farQ = new TestGaussianQDistribution(10.0, 1.);

    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsProbabilisticDouble() {
    GaussianVertex v1 = new GaussianVertex(0., 1.);
    ConstantDoubleVertex v2 = new ConstantDoubleVertex(0.1);
    DoubleVertex v3 = v1.plus(v2);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(v3.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(v1), 1000);

    // q1 is centred near the posterior; q2 is far away, so its divergence must be larger.
    ProbabilisticDouble nearQ = new GaussianVertex(0.1, 1.);
    ProbabilisticDouble farQ = new GaussianVertex(10.0, 1.);

    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
/**
 * @param model network for which to choose sampling algorithm.
 * @param random the random number generator.
 * @return recommended sampling algorithm for this network.
 */
public PosteriorSamplingAlgorithm withDefaultConfigFor(KeanuProbabilisticModel model, KeanuRandom random) {
    // NUTS requires gradients; fall back to Metropolis-Hastings for non-differentiable graphs.
    boolean differentiable = DifferentiableChecker.isDifferentiableWrtLatents(model.getLatentOrObservedVertices());
    return differentiable
        ? Keanu.Sampling.NUTS.withDefaultConfig(random)
        : Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model, random);
}
}
@Test
public void throwsExceptionIfNetworkStateHasMoreThanOneVertexAndQIsProbabilisticDouble() {
    GaussianVertex v1 = new GaussianVertex(0., 1.);
    ConstantDoubleVertex v2 = new ConstantDoubleVertex(0.1);
    DoubleVertex v3 = v1.plus(v2);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(v3.getConnectedGraph());
    // Sample two vertices so each NetworkState holds more than one variable.
    NetworkSamples samples = MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, Arrays.asList(v1, v3), 1000);

    ProbabilisticDouble q = new GaussianVertex(0.1, 1.);

    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("A NetworkState does not contain exactly 1 variable and ProbabilisticDouble can only compute the log probability of one value. Try computing KL divergence against a QDistribution instead.");

    KLDivergence.compute(q, samples);
}
/** Draws 2000 posterior samples of the vertex and returns their mean. */
private static double calculateMeanOfVertex(IntegerVertex vertex) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());
    return MetropolisHastings.withDefaultConfigFor(model, KeanuRandom.getDefaultRandom())
        .generatePosteriorSamples(model, Collections.singletonList(vertex))
        .stream()
        .limit(2000)
        .collect(Collectors.averagingInt((NetworkSample state) -> state.get(vertex).scalar()));
}
}
@Category(Slow.class)
@Test
public void samplesComplexDiscretePriorWithDefaults() {
    MCMCTestCase testCase = new MultiVariateDiscreteTestCase();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(testCase.getModel());

    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVertices(), 1000);

    testCase.assertExpected(posteriorSamples);
}
/**
 * Draws {@code n} posterior samples from the oyster-infection network, first probing
 * for a network state with non-zero probability.
 *
 * @param n number of samples to draw
 * @return the posterior samples
 */
public NetworkSamples sample(int n) {
    BayesianNetwork network = new BayesianNetwork(infectedOysters.getConnectedGraph());
    network.probeForNonZeroProbability(100, random);
    // Sampling from an impossible state would be meaningless; fail fast instead.
    assertNotEquals(Double.NEGATIVE_INFINITY, network.getLogOfMasterP());

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(network);
    return MetropolisHastings.withDefaultConfigFor(model, random)
        .getPosteriorSamples(model, network.getLatentVertices(), n);
}
@Test
public void whenRunningMetropolisHastingsThenSamplesArePrinted() {
    final Vertex<DoubleTensor> temperature = new UniformVertex(20., 30.);
    // Attaching a PrintVertex causes every sampled value to be written to the print stream.
    new PrintVertex<>(temperature);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(temperature.getConnectedGraph());
    final int nSamples = 100;
    MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVariables(), nSamples);

    verify(printStream, atLeast(nSamples)).print(anyString());
}
@Test
public void samplingWithAssertionWorks() {
    // The graph assertion can never hold, so sampling must raise GraphAssertionException.
    thrown.expect(GraphAssertionException.class);

    GaussianVertex gaussian = new GaussianVertex(5, 1);
    gaussian.greaterThan(new ConstantDoubleVertex(1000)).assertTrue();

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(gaussian.getConnectedGraph());
    MetropolisHastings.withDefaultConfigFor(model)
        .generatePosteriorSamples(model, model.getLatentVariables())
        .generate(10);
}
/**
 * Runs the MetropolisHastings algorithm and saves the resulting samples to results.
 */
public void run() {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(buildBayesianNetwork());
    // Primitive int: the boxed Integer was needlessly unboxed for every arithmetic use below.
    int numSamples = 500;
    results = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model)
        .generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(numSamples / 5)   // discard the first 20% of samples as burn-in
        .downSampleInterval(3)       // keep every 3rd sample to reduce autocorrelation
        .generate(numSamples);
}
@Test
public void samplesSimpleDiscretePriorWithDefaults() {
    MCMCTestCase testCase = new SingleVariateDiscreteTestCase();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(testCase.getModel());

    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVertices(), 10000);

    testCase.assertExpected(posteriorSamples);
}