/**
 * Approximates the posterior of {@code vertex} with a kernel density estimate,
 * using Metropolis-Hastings samples drawn from the vertex's connected graph.
 *
 * @param vertex   the vertex whose posterior is to be approximated
 * @param nSamples the number of Metropolis-Hastings samples to draw
 * @return a {@link KDEVertex} fitted to the drawn samples
 */
public static KDEVertex approximate(DoubleVertex vertex, Integer nSamples) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());

    DoubleVertexSamples samples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, ImmutableList.of(vertex), nSamples)
        .getDoubleTensorSamples(vertex);

    return approximate(samples);
}
/** Attaching a PrintVertex should cause at least one print per drawn sample. */
@Test
public void whenRunningMetropolisHastingsThenSamplesArePrinted() {
    final Vertex<DoubleTensor> temperature = new UniformVertex(20., 30.);
    new PrintVertex<>(temperature);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(temperature.getConnectedGraph());

    final int sampleCount = 100;
    MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVariables(), sampleCount);

    // Expect a minimum of one printed line for each sample taken.
    verify(printStream, atLeast(sampleCount)).print(anyString());
}
/**
 * KL divergence computed against a ProbabilisticDouble q should grow as q's
 * location moves further away from the sampled posterior p.
 */
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsProbabilisticDouble() {
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex offset = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(offset);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(latent), 1000);

    ProbabilisticDouble nearQ = new GaussianVertex(0.1, 1.);
    ProbabilisticDouble farQ = new GaussianVertex(10.0, 1.);

    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
/**
 * KL divergence computed against a QDistribution should grow as the
 * distribution's location moves further away from the sampled posterior.
 */
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsQDistribution() {
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex offset = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(offset);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(latent), 1000);

    QDistribution nearQ = new TestGaussianQDistribution(0.1, 1.);
    QDistribution farQ = new TestGaussianQDistribution(10.0, 1.);

    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}
/** Default-configured MH should recover the expected posterior of a single discrete variable. */
@Test
public void samplesSimpleDiscretePriorWithDefaults() {
    MCMCTestCase testCase = new SingleVariateDiscreteTestCase();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(testCase.getModel());

    NetworkSamples samples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVertices(), 10000);

    testCase.assertExpected(samples);
}
/**
 * Draws {@code n} Metropolis-Hastings samples from the network connected to
 * {@code infectedOysters}, after probing for a non-zero-probability start state.
 *
 * @param n the number of samples to draw
 * @return posterior samples over the network's latent vertices
 */
public NetworkSamples sample(int n) {
    BayesianNetwork network = new BayesianNetwork(infectedOysters.getConnectedGraph());
    network.probeForNonZeroProbability(100, random);

    // Sampling from an impossible state would be meaningless; fail fast instead.
    assertNotEquals(Double.NEGATIVE_INFINITY, network.getLogOfMasterP());

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(network);
    return MetropolisHastings
        .withDefaultConfigFor(model, random)
        .getPosteriorSamples(model, network.getLatentVertices(), n);
}
public static ABTestingMaximumAPosteriori run() { DoubleVertex probabilityA = new UniformVertex(0., 1.); DoubleVertex probabilityB = new UniformVertex(0., 1.); DoubleVertex delta = probabilityA.minus(probabilityB); BernoulliVertex observationA = new BernoulliVertex(probabilityA); BernoulliVertex observationB = new BernoulliVertex(probabilityB); // manufacture observations int nObsA = 1500; BooleanTensor observationsA = RANDOM.nextDouble(new long[]{1, nObsA}).lessThan(0.05); observationA.observe(observationsA); int nObsB = 750; BooleanTensor observationsB = RANDOM.nextDouble(new long[]{1, nObsB}).lessThan(0.04); observationB.observe(observationsB); //infer the most probable probabilities KeanuProbabilisticModel model = new KeanuProbabilisticModel(probabilityA.getConnectedGraph()); int sampleCount = 100; NetworkSamples networkSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model) .getPosteriorSamples(model, Arrays.asList(probabilityA, probabilityB, delta), sampleCount) .drop(sampleCount/2).downSample(model.getLatentVariables().size()); DoubleVertexSamples pASamples = networkSamples.getDoubleTensorSamples(probabilityA); DoubleVertexSamples pBSamples = networkSamples.getDoubleTensorSamples(probabilityB); //most probable probabilities are the averages of the MH walk in this case double mapPA = pASamples.getAverages().scalar(); double mapPB = pBSamples.getAverages().scalar(); return new ABTestingMaximumAPosteriori(mapPA, mapPB); }
/**
 * Computing KL divergence against a ProbabilisticDouble requires a single-variable
 * NetworkState; sampling two variables must raise IllegalArgumentException.
 */
@Test
public void throwsExceptionIfNetworkStateHasMoreThanOneVertexAndQIsProbabilisticDouble() {
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex offset = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(offset);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Arrays.asList(latent, shifted), 1000);

    ProbabilisticDouble q = new GaussianVertex(0.1, 1.);

    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("A NetworkState does not contain exactly 1 variable and ProbabilisticDouble can only compute the log probability of one value. Try computing KL divergence against a QDistribution instead.");

    KLDivergence.compute(q, samples);
}
/** Default-configured MH should recover the expected posterior of a multivariate discrete model. */
@Category(Slow.class)
@Test
public void samplesComplexDiscretePriorWithDefaults() {
    MCMCTestCase testCase = new MultiVariateDiscreteTestCase();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(testCase.getModel());

    NetworkSamples samples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVertices(), 1000);

    testCase.assertExpected(samples);
}
/**
 * Observing (A or B) == false forces both A and B to be false, so the
 * posterior probability of A being true should be (near) zero.
 */
@Test
public void samplesFromPriorWithObservedDeterministic() {
    BernoulliVertex a = new BernoulliVertex(0.5);
    BernoulliVertex b = new BernoulliVertex(0.5);
    BooleanVertex aOrB = a.or(b);
    aOrB.observe(false);

    BayesianNetwork network = new BayesianNetwork(a.getConnectedGraph());
    network.probeForNonZeroProbability(100);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(network);
    NetworkSamples samples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(a), 10000);

    double posteriorProbATrue = samples.get(a).probability(v -> v.scalar());

    assertEquals(0.0, posteriorProbATrue, 0.01);
}
/**
 * With C = A + B observed at 46 and symmetric priors at 20, the posterior
 * means of A and B should sum to approximately 44 (per tensor element).
 */
@Category(Slow.class)
@Test
public void samplesContinuousTensorPrior() {
    long[] shape = new long[]{1, 1};
    DoubleVertex a = new GaussianVertex(shape, 20.0, 1.0);
    DoubleVertex b = new GaussianVertex(shape, 20.0, 1.0);
    a.setValue(20.0);
    b.setValue(20.0);

    DoubleVertex cObserved = new GaussianVertex(a.plus(b), 1.0);
    cObserved.observe(46.0);

    BayesianNetwork network = new BayesianNetwork(Arrays.asList(a, b, cObserved));
    network.probeForNonZeroProbability(100);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(network);
    NetworkSamples samples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Arrays.asList(a, b), 5000);

    DoubleTensor meanA = samples.getDoubleTensorSamples(a).getAverages();
    DoubleTensor meanB = samples.getDoubleTensorSamples(b).getAverages();
    DoubleTensor sumOfMeans = meanA.plus(meanB);

    for (double element : sumOfMeans.asFlatDoubleArray()) {
        assertEquals(44.0, element, 0.1);
    }
}
/**
 * Documentation example: computes the autocorrelation of the posterior samples
 * of a scalar latent variable. The SNIPPET markers below delimit the region
 * extracted into the docs — do not alter them.
 */
private static void scalarAutocorrelationExample() {
    DoubleVertex A = new GaussianVertex(20.0, 1.0);
    DoubleVertex B = new GaussianVertex(20.0, 1.0);
    DoubleVertex C = new GaussianVertex(A.plus(B), 1.0);
    C.observe(43.0);
    A.setValue(20.0);
    B.setValue(20.0);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(C.getConnectedGraph());
    //%%SNIPPET_START%% ScalarAutocorrelation
    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples(
        model,
        model.getLatentVariables(),
        100
    );
    DoubleTensor autocorrelation = posteriorSamples.getDoubleTensorSamples(A).getAutocorrelation();
    //%%SNIPPET_END%% ScalarAutocorrelation
}
/**
 * Documentation example: computes the autocorrelation along a chosen tensor
 * index of the posterior samples of a tensor-shaped latent variable. The
 * SNIPPET markers below delimit the region extracted into the docs — do not
 * alter them.
 */
private static void tensorAutocorrelationExample() {
    DoubleVertex A = new GaussianVertex(new long[]{1, 5}, 20.0, 1.0);
    DoubleVertex B = new GaussianVertex(new long[]{1, 5}, 20.0, 1.0);
    DoubleVertex C = new GaussianVertex(A.plus(B), 1.0);
    BayesianNetwork bayesNet = new BayesianNetwork(C.getConnectedGraph());
    C.observe(new double[]{1, 4, 5, 7, 8});
    bayesNet.probeForNonZeroProbability(100);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(bayesNet);
    //%%SNIPPET_START%% TensorAutocorrelation
    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples(
        model,
        model.getLatentVariables(),
        100
    );
    DoubleTensor autocorrelation = posteriorSamples.getDoubleTensorSamples(A).getAutocorrelation(0, 1);
    //%%SNIPPET_END%% TensorAutocorrelation
}
}
/** MH with single-variable-per-step selection should still match the expected posterior. */
@Test
public void samplesContinuousPriorSingleVariableSelected() {
    MCMCTestCase testCase = new SumGaussianTestCase();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(testCase.getModel());

    MetropolisHastings sampler = MetropolisHastings.builder()
        .proposalDistribution(new PriorProposalDistribution(model.getLatentVertices()))
        .rejectionStrategy(new RollBackToCachedValuesOnRejection(model.getLatentVertices()))
        .variableSelector(MHStepVariableSelector.SINGLE_VARIABLE_SELECTOR)
        .build();

    NetworkSamples samples = sampler.getPosteriorSamples(model, model.getLatentVertices(), 5000);

    testCase.assertExpected(samples);
}
/** MH with all-variables-per-step selection should still match the expected posterior. */
@Test
public void samplesContinuousPriorAllVariablesSelected() {
    MCMCTestCase testCase = new SumGaussianTestCase();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(testCase.getModel());

    MetropolisHastings sampler = MetropolisHastings.builder()
        .proposalDistribution(new PriorProposalDistribution(model.getLatentVertices()))
        .rejectionStrategy(new RollBackToCachedValuesOnRejection(model.getLatentVertices()))
        .variableSelector(MHStepVariableSelector.FULL_VARIABLE_SELECTOR)
        .build();

    NetworkSamples samples = sampler.getPosteriorSamples(model, model.getLatentVertices(), 5000);

    testCase.assertExpected(samples);
}
/**
 * MH configured with a Gaussian proposal distribution should recover the
 * posterior: with A + B observed at 46, the posterior means should sum to ~44.
 */
@Test
public void youCanUseAGaussianProposal() {
    DoubleVertex a = new GaussianVertex(20.0, 1.0);
    DoubleVertex b = new GaussianVertex(20.0, 1.0);
    a.setValue(20.0);
    b.setValue(20.0);

    DoubleVertex cObserved = new GaussianVertex(a.plus(b), 1.0);
    cObserved.observe(46.0);

    BayesianNetwork network = new BayesianNetwork(Arrays.asList(a, b, cObserved));
    network.probeForNonZeroProbability(100);

    ProposalDistribution gaussianProposal = new GaussianProposalDistribution(DoubleTensor.scalar(1.));
    MetropolisHastings sampler = MetropolisHastings.builder()
        .proposalDistribution(gaussianProposal)
        .rejectionStrategy(new RollbackAndCascadeOnRejection(network.getLatentVertices()))
        .build();

    NetworkSamples samples = sampler.getPosteriorSamples(
        new KeanuProbabilisticModel(network),
        Arrays.asList(a, b),
        1000
    );

    double meanA = samples.getDoubleTensorSamples(a).getAverages().scalar();
    double meanB = samples.getDoubleTensorSamples(b).getAverages().scalar();

    assertEquals(44.0, meanA + meanB, 0.1);
}
/**
 * Assertion vertices bounding the temperature inside its prior's support
 * should never fire while sampling — this test passes by not throwing.
 */
@Test
public void samplingWithAssertThatShouldntFire() {
    UniformVertex temperature = new UniformVertex(20., 30.);

    // Assertions match the prior's support exactly, so sampling cannot violate them.
    temperature.lessThan(new ConstantDoubleVertex(30)).assertTrue();
    temperature.greaterThan(new ConstantDoubleVertex(20)).assertTrue();

    GaussianVertex firstThermometer = new GaussianVertex(temperature, 2.5);
    GaussianVertex secondThermometer = new GaussianVertex(temperature, 5.);
    firstThermometer.observe(25.);
    secondThermometer.observe(30.);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(temperature.getConnectedGraph());
    MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVariables(), 100);
}
/**
 * A process-backed LambdaModelVertex should participate in MH sampling:
 * observing its outputs should pull the posterior of the model input towards
 * the value the observations imply.
 */
@Category(Slow.class)
@Test
public void modelWorksAsPartOfSampling() {
    inputToModel = new GaussianVertex(25, 5);
    weatherModel.setInputToModel(inputToModel);

    Map<VertexLabel, Vertex<? extends Tensor>> inputs =
        ImmutableMap.of(new VertexLabel("Temperature"), inputToModel);
    ModelVertex model = LambdaModelVertex.createFromProcess(inputs, COMMAND, weatherModel::updateValues);

    DoubleVertex chanceOfRain = model.getDoubleModelOutputVertex(new VertexLabel("ChanceOfRain"));
    DoubleVertex humidity = model.getDoubleModelOutputVertex(new VertexLabel("Humidity"));

    DoubleVertex chanceOfRainObservation = new GaussianVertex(chanceOfRain, 5);
    DoubleVertex humidityObservation = new GaussianVertex(humidity, 5);
    chanceOfRainObservation.observe(3.0);
    humidityObservation.observe(60.0);

    KeanuProbabilisticModel probabilisticModel =
        new KeanuProbabilisticModel(chanceOfRainObservation.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(probabilisticModel)
        .getPosteriorSamples(probabilisticModel, inputToModel, 220);

    double posteriorMeanInput = samples.getDoubleTensorSamples(inputToModel).getAverages().scalar();
    Assert.assertEquals(29., posteriorMeanInput, 0.3);
}
@Category(Slow.class) @Test public void modelWorksAsPartOfSampling() { inputToModel = new GaussianVertex(25., 5); weatherModel.setInputToModel(inputToModel); Map<VertexLabel, Vertex<? extends Tensor>> inputs = ImmutableMap.of(new VertexLabel("Temperature"), inputToModel); ModelVertex model = new LambdaModelVertex(inputs, weatherModel::modelExecution, weatherModel::updateValues); DoubleVertex chanceOfRain = model.getDoubleModelOutputVertex(new VertexLabel("ChanceOfRain")); DoubleVertex humidity = model.getDoubleModelOutputVertex(new VertexLabel("Humidity")); //My prior belief is the temperature is 29.0. //These observations are indicative of a temperature of 30. DoubleVertex chanceOfRainObservation = new GaussianVertex(chanceOfRain, 5); DoubleVertex humidityObservation = new GaussianVertex(humidity, 5); humidityObservation.observe(60.0); chanceOfRainObservation.observe(3.0); KeanuProbabilisticModel probabilisticModel = new KeanuProbabilisticModel(chanceOfRainObservation.getConnectedGraph()); NetworkSamples posteriorSamples = MetropolisHastings.withDefaultConfigFor(probabilisticModel, random).getPosteriorSamples( probabilisticModel, inputToModel, 200 ); double averagePosteriorInput = posteriorSamples.getDoubleTensorSamples(inputToModel).getAverages().scalar(); Assert.assertEquals(29., averagePosteriorInput, 0.1); }
/** MH with all-variables-per-step selection should recover a multivariate discrete posterior. */
@Category(Slow.class)
@Test
public void samplesComplexDiscreteWithFullVariableSelect() {
    MCMCTestCase testCase = new MultiVariateDiscreteTestCase();
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(testCase.getModel());

    MetropolisHastings sampler = MetropolisHastings.builder()
        .proposalDistribution(new PriorProposalDistribution(model.getLatentVertices()))
        .rejectionStrategy(new RollBackToCachedValuesOnRejection(model.getLatentVertices()))
        .variableSelector(MHStepVariableSelector.FULL_VARIABLE_SELECTOR)
        .build();

    NetworkSamples samples = sampler.getPosteriorSamples(model, model.getLatentVertices(), 1000);

    testCase.assertExpected(samples);
}