/**
 * Draws posterior samples from the model using this sampler.
 *
 * @param model a probabilistic model containing latent variables
 * @param variablesToSampleFrom the variables to include in the returned samples
 * @param sampleCount number of samples to take using the algorithm
 * @return Samples for each variable ordered by MCMC iteration
 */
@Override
public NetworkSamples getPosteriorSamples(ProbabilisticModel model,
                                          List<? extends Variable> variablesToSampleFrom,
                                          int sampleCount) {
    // Delegate to the lazy generator and materialise the requested number of samples.
    NetworkSamplesGenerator generator = generatePosteriorSamples(model, variablesToSampleFrom);
    return generator.generate(sampleCount);
}
/**
 * Builds a kernel-density-estimate approximation of the posterior of {@code vertex}
 * by drawing {@code nSamples} samples with a default-configured Metropolis Hastings sampler.
 *
 * @param vertex the vertex whose posterior should be approximated
 * @param nSamples number of MCMC samples to base the KDE on
 * @return a KDE vertex fitted to the drawn samples
 */
public static KDEVertex approximate(DoubleVertex vertex, Integer nSamples) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());

    DoubleVertexSamples samples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, ImmutableList.of(vertex), nSamples)
        .getDoubleTensorSamples(vertex);

    return approximate(samples);
}
/**
 * Creates a builder for configuring a Metropolis Hastings sampler.
 * Delegates to the underlying MCMC implementation's builder; the fully
 * qualified name avoids a clash with this class's own name.
 */
public static io.improbable.keanu.algorithms.mcmc.MetropolisHastings.MetropolisHastingsBuilder builder() {
    return io.improbable.keanu.algorithms.mcmc.MetropolisHastings.builder();
}
}
@Test
public void samplesContinuousPriorSingleVariableSelected() {
    // Sum-of-Gaussians test case; the sampler updates one variable per step.
    MCMCTestCase testCase = new SumGaussianTestCase();
    BayesianNetwork bayesNet = testCase.getModel();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(bayesNet);

    MetropolisHastings sampler = MetropolisHastings.builder()
        .proposalDistribution(new PriorProposalDistribution(model.getLatentVertices()))
        .rejectionStrategy(new RollBackToCachedValuesOnRejection(model.getLatentVertices()))
        .variableSelector(MHStepVariableSelector.SINGLE_VARIABLE_SELECTOR)
        .build();

    NetworkSamples posteriorSamples =
        sampler.getPosteriorSamples(model, model.getLatentVertices(), 5000);

    testCase.assertExpected(posteriorSamples);
}
/**
 * Constructs a MetropolisHastings sampler from the configured random source,
 * proposal distribution, variable selector and rejection strategy.
 */
public MetropolisHastings build() {
    return new MetropolisHastings(random, proposalDistribution, variableSelector, rejectionStrategy);
}
/**
 * Creates a lazy generator of posterior samples for the given variables.
 * Sampling only happens when the returned generator is driven (e.g. via
 * {@code generate(...)} or {@code stream()}); progress is reported through a StatusBar.
 */
@Override
public NetworkSamplesGenerator generatePosteriorSamples(final ProbabilisticModel model,
                                                        final List<? extends Variable> variablesToSampleFrom) {
    return new NetworkSamplesGenerator(setupSampler(model, variablesToSampleFrom), StatusBar::new);
}
@Test
public void samplesContinuousPriorAllVariablesSelected() {
    // Sum-of-Gaussians test case; the sampler updates every variable per step.
    MCMCTestCase testCase = new SumGaussianTestCase();
    BayesianNetwork bayesNet = testCase.getModel();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(bayesNet);

    MetropolisHastings sampler = MetropolisHastings.builder()
        .proposalDistribution(new PriorProposalDistribution(model.getLatentVertices()))
        .rejectionStrategy(new RollBackToCachedValuesOnRejection(model.getLatentVertices()))
        .variableSelector(MHStepVariableSelector.FULL_VARIABLE_SELECTOR)
        .build();

    NetworkSamples posteriorSamples =
        sampler.getPosteriorSamples(model, model.getLatentVertices(), 5000);

    testCase.assertExpected(posteriorSamples);
}
@Test
public void whenRunningMetropolisHastingsThenSamplesArePrinted() {
    final int nSamples = 100;

    // Attach a PrintVertex so every sampled value is echoed to the print stream.
    final Vertex<DoubleTensor> temperature = new UniformVertex(20., 30.);
    new PrintVertex<>(temperature);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(temperature.getConnectedGraph());
    MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVariables(), nSamples);

    // One print per sample at minimum.
    verify(printStream, atLeast(nSamples)).print(anyString());
}
/**
 * Estimates the prior probability that {@code vertex} is true by counting
 * how often it samples true over {@code sampleCount} Metropolis Hastings samples.
 *
 * @param vertex the boolean vertex to estimate
 * @param sampleCount number of samples to draw
 * @param random the random source for the sampler
 * @return the fraction of samples in which the vertex was true
 */
public static double priorProbabilityTrue(Vertex<? extends Tensor<Boolean>> vertex, int sampleCount, KeanuRandom random) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());

    long trueCount = MetropolisHastings.withDefaultConfigFor(model, random)
        .generatePosteriorSamples(model, Collections.singletonList(vertex))
        .stream()
        .limit(sampleCount)
        .filter(state -> state.get(vertex).scalar())
        .count();

    return (double) trueCount / sampleCount;
}
@Category(Slow.class)
@Test
public void samplesComplexDiscreteWithFullVariableSelect() {
    // Multi-variate discrete case; all variables are proposed each step.
    MCMCTestCase testCase = new MultiVariateDiscreteTestCase();
    BayesianNetwork bayesNet = testCase.getModel();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(bayesNet);

    MetropolisHastings sampler = MetropolisHastings.builder()
        .proposalDistribution(new PriorProposalDistribution(model.getLatentVertices()))
        .rejectionStrategy(new RollBackToCachedValuesOnRejection(model.getLatentVertices()))
        .variableSelector(MHStepVariableSelector.FULL_VARIABLE_SELECTOR)
        .build();

    NetworkSamples posteriorSamples =
        sampler.getPosteriorSamples(model, model.getLatentVertices(), 1000);

    testCase.assertExpected(posteriorSamples);
}
@Category(Slow.class)
@Test
public void youCanChooseSamplingInsteadOfGradientOptimization() {
    final int smallRawDataSize = 20;
    final int samplingCount = 5000;

    LinearRegressionTestUtils.TestData data =
        LinearRegressionTestUtils.generateSingleFeatureData(smallRawDataSize);

    ProposalDistribution proposalDistribution =
        new GaussianProposalDistribution(DoubleTensor.scalar(0.25));

    // Fit via MCMC sampling rather than gradient optimization.
    SamplingModelFitting sampling = new SamplingModelFitting(
        model -> MetropolisHastings.builder()
            .proposalDistribution(proposalDistribution)
            .variableSelector(MHStepVariableSelector.SINGLE_VARIABLE_SELECTOR)
            .rejectionStrategy(new RollbackAndCascadeOnRejection(model.getLatentVertices()))
            .build(),
        samplingCount);

    RegressionModel linearRegressionModel = RegressionModel.withTrainingData(data.xTrain, data.yTrain)
        .withRegularization(RegressionRegularization.LASSO)
        .withPriorOnIntercept(0, data.intercept)
        .withPriorOnWeights(DoubleTensor.create(0., data.weights.getShape()), data.weights)
        .withSampling(sampling)
        .build();

    // Discard 10% burn-in and thin by a factor of 2 before asserting.
    NetworkSamples networkSamples = sampling.getNetworkSamples()
        .drop(samplingCount / 10)
        .downSample(2);

    assertSampledWeightsAndInterceptMatchTestData(
        networkSamples.getDoubleTensorSamples(linearRegressionModel.getWeightVertex().getId()),
        networkSamples.getDoubleTensorSamples(linearRegressionModel.getInterceptVertex().getId()),
        data);
}
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsProbabilisticDouble() {
    // P: posterior samples of a unit Gaussian latent, connected through a small shift.
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex shift = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(shift);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(latent), 1000);

    // Q candidates: one close to the latent's location, one far from it.
    ProbabilisticDouble nearQ = new GaussianVertex(0.1, 1.);
    ProbabilisticDouble farQ = new GaussianVertex(10.0, 1.);

    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
/**
 * Runs the MetropolisHastings algorithm and saves the resulting samples to results.
 * Drops the first 20% of samples as burn-in and keeps every 3rd sample thereafter.
 */
public void run() {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(buildBayesianNetwork());
    // Primitive int instead of boxed Integer: the original boxed local forced
    // needless auto-unboxing in numSamples / 5 and generate(numSamples).
    final int numSamples = 500;
    results = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model)
        .generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(numSamples / 5)
        .downSampleInterval(3)
        .generate(numSamples);
}
@Test
public void youCanUseAGaussianProposal() {
    // A + B with both priors at 20; observing C = 46 pulls the posterior sum toward 44-46.
    DoubleVertex A = new GaussianVertex(20.0, 1.0);
    DoubleVertex B = new GaussianVertex(20.0, 1.0);
    A.setValue(20.0);
    B.setValue(20.0);
    DoubleVertex Cobserved = new GaussianVertex(A.plus(B), 1.0);
    Cobserved.observe(46.0);

    BayesianNetwork bayesNet = new BayesianNetwork(Arrays.asList(A, B, Cobserved));
    bayesNet.probeForNonZeroProbability(100);

    ProposalDistribution proposalDistribution = new GaussianProposalDistribution(DoubleTensor.scalar(1.));
    MetropolisHastings metropolisHastings = MetropolisHastings.builder()
        .proposalDistribution(proposalDistribution)
        .rejectionStrategy(new RollbackAndCascadeOnRejection(bayesNet.getLatentVertices()))
        .build();

    NetworkSamples posteriorSamples = metropolisHastings.getPosteriorSamples(
        new KeanuProbabilisticModel(bayesNet),
        Arrays.asList(A, B),
        1000);

    double posteriorSum = posteriorSamples.getDoubleTensorSamples(A).getAverages().scalar()
        + posteriorSamples.getDoubleTensorSamples(B).getAverages().scalar();

    assertEquals(44.0, posteriorSum, 0.1);
}
SamplingModelFitting sampling = new SamplingModelFitting(model -> MetropolisHastings.builder() .proposalDistribution(proposalDistribution) .variableSelector(MHStepVariableSelector.SINGLE_VARIABLE_SELECTOR)
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsQDistribution() {
    // P: posterior samples of a unit Gaussian latent, connected through a small shift.
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex shift = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(shift);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(latent), 1000);

    // Q candidates: one close to the latent's location, one far from it.
    QDistribution nearQ = new TestGaussianQDistribution(0.1, 1.);
    QDistribution farQ = new TestGaussianQDistribution(10.0, 1.);

    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
/**
 * Estimates the mean of an integer vertex by averaging 2000 Metropolis Hastings samples.
 */
private static double calculateMeanOfVertex(IntegerVertex vertex) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());

    return MetropolisHastings.withDefaultConfigFor(model, KeanuRandom.getDefaultRandom())
        .generatePosteriorSamples(model, Collections.singletonList(vertex))
        .stream()
        .limit(2000)
        .collect(Collectors.averagingInt((NetworkSample sample) -> sample.get(vertex).scalar()));
}
}
BayesianNetwork network = new BayesianNetwork(start.getConnectedGraph()); MetropolisHastings.builder() .proposalDistribution(new PriorProposalDistribution(network.getLatentVertices())) .rejectionStrategy(new RollBackToCachedValuesOnRejection(network.getLatentVertices())) .build() .getPosteriorSamples( new KeanuProbabilisticModel(network), network.getLatentVertices(),
SamplingModelFitting sampling = new SamplingModelFitting(model -> MetropolisHastings.builder() .proposalDistribution(proposalDistribution) .variableSelector(MHStepVariableSelector.SINGLE_VARIABLE_SELECTOR)
@Test
public void samplesSimpleDiscretePriorWithDefaults() {
    // Single discrete variable sampled with the default MH configuration.
    MCMCTestCase testCase = new SingleVariateDiscreteTestCase();
    BayesianNetwork bayesNet = testCase.getModel();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(bayesNet);

    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVertices(), 10000);

    testCase.assertExpected(posteriorSamples);
}