KeanuProbabilisticModel model = new KeanuProbabilisticModel(net); Stream<NetworkSample> networkSamples = MetropolisHastings.withDefaultConfigFor(model).generatePosteriorSamples( model, Arrays.asList(oRingFailure, residualFuel, alarm1FalsePositive)
NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples( model, Arrays.asList(sprinkler, rain),
NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model) .generatePosteriorSamples(model, model.getLatentVariables()) .dropCount(numSamples / 2)
NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples( model, model.getLatentVariables(),
NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples( model, Arrays.asList(A, B),
NetworkSamplesGenerator samplesGenerator = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model) .generatePosteriorSamples(model, singletonList(probabilityOfCheating));
NetworkSamples networkSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model) .generatePosteriorSamples(model, model.getLatentVariables()) .dropCount(sampleCount / 2)
@Category(Slow.class)
@Test
public void samplesContinuousTensorPrior() {
    // Two independent Gaussian tensor priors centred on 20, whose sum is observed at 46.
    long[] tensorShape = new long[]{1, 1};
    DoubleVertex A = new GaussianVertex(tensorShape, 20.0, 1.0);
    DoubleVertex B = new GaussianVertex(tensorShape, 20.0, 1.0);
    A.setValue(20.0);
    B.setValue(20.0);

    DoubleVertex Cobserved = new GaussianVertex(A.plus(B), 1.0);
    Cobserved.observe(46.0);

    BayesianNetwork network = new BayesianNetwork(Arrays.asList(A, B, Cobserved));
    network.probeForNonZeroProbability(100);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(network);

    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Arrays.asList(A, B), 5000);

    // The test expects each element of mean(A) + mean(B) to land near 44,
    // between the prior sum (40) and the observation (46).
    DoubleTensor meanOfA = posteriorSamples.getDoubleTensorSamples(A).getAverages();
    DoubleTensor meanOfB = posteriorSamples.getDoubleTensorSamples(B).getAverages();
    DoubleTensor summedMeans = meanOfA.plus(meanOfB);

    for (double element : summedMeans.asFlatDoubleArray()) {
        assertEquals(44.0, element, 0.1);
    }
}
public static ABTestingMaximumAPosteriori run() { DoubleVertex probabilityA = new UniformVertex(0., 1.); DoubleVertex probabilityB = new UniformVertex(0., 1.); DoubleVertex delta = probabilityA.minus(probabilityB); BernoulliVertex observationA = new BernoulliVertex(probabilityA); BernoulliVertex observationB = new BernoulliVertex(probabilityB); // manufacture observations int nObsA = 1500; BooleanTensor observationsA = RANDOM.nextDouble(new long[]{1, nObsA}).lessThan(0.05); observationA.observe(observationsA); int nObsB = 750; BooleanTensor observationsB = RANDOM.nextDouble(new long[]{1, nObsB}).lessThan(0.04); observationB.observe(observationsB); //infer the most probable probabilities KeanuProbabilisticModel model = new KeanuProbabilisticModel(probabilityA.getConnectedGraph()); int sampleCount = 100; NetworkSamples networkSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model) .getPosteriorSamples(model, Arrays.asList(probabilityA, probabilityB, delta), sampleCount) .drop(sampleCount/2).downSample(model.getLatentVariables().size()); DoubleVertexSamples pASamples = networkSamples.getDoubleTensorSamples(probabilityA); DoubleVertexSamples pBSamples = networkSamples.getDoubleTensorSamples(probabilityB); //most probable probabilities are the averages of the MH walk in this case double mapPA = pASamples.getAverages().scalar(); double mapPB = pBSamples.getAverages().scalar(); return new ABTestingMaximumAPosteriori(mapPA, mapPB); }
@Category(Slow.class) @Test public void modelWorksAsPartOfSampling() { inputToModel = new GaussianVertex(25., 5); weatherModel.setInputToModel(inputToModel); Map<VertexLabel, Vertex<? extends Tensor>> inputs = ImmutableMap.of(new VertexLabel("Temperature"), inputToModel); ModelVertex model = new LambdaModelVertex(inputs, weatherModel::modelExecution, weatherModel::updateValues); DoubleVertex chanceOfRain = model.getDoubleModelOutputVertex(new VertexLabel("ChanceOfRain")); DoubleVertex humidity = model.getDoubleModelOutputVertex(new VertexLabel("Humidity")); //My prior belief is the temperature is 29.0. //These observations are indicative of a temperature of 30. DoubleVertex chanceOfRainObservation = new GaussianVertex(chanceOfRain, 5); DoubleVertex humidityObservation = new GaussianVertex(humidity, 5); humidityObservation.observe(60.0); chanceOfRainObservation.observe(3.0); KeanuProbabilisticModel probabilisticModel = new KeanuProbabilisticModel(chanceOfRainObservation.getConnectedGraph()); NetworkSamples posteriorSamples = MetropolisHastings.withDefaultConfigFor(probabilisticModel, random).getPosteriorSamples( probabilisticModel, inputToModel, 200 ); double averagePosteriorInput = posteriorSamples.getDoubleTensorSamples(inputToModel).getAverages().scalar(); Assert.assertEquals(29., averagePosteriorInput, 0.1); }
@Category(Slow.class)
@Test
public void modelWorksAsPartOfSampling() {
    // Latent temperature with a Gaussian(25, 5) prior, fed into an external process model.
    inputToModel = new GaussianVertex(25, 5);
    weatherModel.setInputToModel(inputToModel);

    Map<VertexLabel, Vertex<? extends Tensor>> modelInputs =
        ImmutableMap.of(new VertexLabel("Temperature"), inputToModel);

    // The model is executed as an external process via COMMAND.
    ModelVertex processVertex =
        LambdaModelVertex.createFromProcess(modelInputs, COMMAND, weatherModel::updateValues);
    DoubleVertex chanceOfRain = processVertex.getDoubleModelOutputVertex(new VertexLabel("ChanceOfRain"));
    DoubleVertex humidity = processVertex.getDoubleModelOutputVertex(new VertexLabel("Humidity"));

    // Noisy observations of the model outputs; the assertion below expects these to
    // pull the posterior mean temperature to roughly 29.
    DoubleVertex chanceOfRainObservation = new GaussianVertex(chanceOfRain, 5);
    DoubleVertex humidityObservation = new GaussianVertex(humidity, 5);
    chanceOfRainObservation.observe(3.0);
    humidityObservation.observe(60.0);

    KeanuProbabilisticModel probabilisticModel =
        new KeanuProbabilisticModel(chanceOfRainObservation.getConnectedGraph());

    NetworkSamples posteriorSamples = MetropolisHastings
        .withDefaultConfigFor(probabilisticModel)
        .getPosteriorSamples(probabilisticModel, inputToModel, 220);

    double posteriorMeanTemperature =
        posteriorSamples.getDoubleTensorSamples(inputToModel).getAverages().scalar();
    Assert.assertEquals(29., posteriorMeanTemperature, 0.3);
}
// Demonstrates computing autocorrelation for tensor-valued posterior samples.
// Code between the %%SNIPPET%% markers is extracted verbatim into documentation,
// so it is left untouched.
private static void tensorAutocorrelationExample() {
    // Two Gaussian priors of shape [1, 5]; their sum is observed through a noisy Gaussian.
    DoubleVertex A = new GaussianVertex(new long[]{1, 5}, 20.0, 1.0);
    DoubleVertex B = new GaussianVertex(new long[]{1, 5}, 20.0, 1.0);
    DoubleVertex C = new GaussianVertex(A.plus(B), 1.0);
    BayesianNetwork bayesNet = new BayesianNetwork(C.getConnectedGraph());
    C.observe(new double[]{1, 4, 5, 7, 8});
    // Find a starting state with non-zero probability before sampling.
    bayesNet.probeForNonZeroProbability(100);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(bayesNet);
    //%%SNIPPET_START%% TensorAutocorrelation
    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples(
        model,
        model.getLatentVariables(),
        100
    );
    // NOTE(review): getAutocorrelation(0, 1) presumably selects the tensor element
    // at index [0, 1] — confirm against the DoubleVertexSamples API.
    DoubleTensor autocorrelation = posteriorSamples.getDoubleTensorSamples(A).getAutocorrelation(0, 1);
    //%%SNIPPET_END%% TensorAutocorrelation
}
}
/**
 * Estimates the probability of cheating from survey results, modelling the total
 * number of "yes" answers as a single Binomial draw.
 *
 * @param numberOfStudents   total number of students surveyed
 * @param numberOfYesAnswers observed count of "yes" answers
 * @return the approximate posterior mean of the probability of cheating
 */
public static double runUsingBinomial(int numberOfStudents, int numberOfYesAnswers) {
    int totalSamples = 100;

    UniformVertex probabilityOfCheating = new UniformVertex(0.0, 1.0);
    // The per-student "yes" probability is 0.5 * P(cheating) + 0.25.
    DoubleVertex pYesAnswer = probabilityOfCheating.times(0.5).plus(0.25);

    BinomialVertex answerTotal = new BinomialVertex(pYesAnswer, numberOfStudents);
    answerTotal.observe(numberOfYesAnswers);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(answerTotal.getConnectedGraph());

    NetworkSamplesGenerator generator = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model)
        .generatePosteriorSamples(model, singletonList(probabilityOfCheating));

    // Burn in 10% of the samples and thin by the latent-variable count.
    NetworkSamples samples = generator
        .dropCount(totalSamples / 10)
        .downSampleInterval(model.getLatentVariables().size())
        .generate(totalSamples);

    return samples
        .getDoubleTensorSamples(probabilityOfCheating)
        .getAverages()
        .scalar();
}
// Demonstrates computing autocorrelation for scalar-valued posterior samples.
// Code between the %%SNIPPET%% markers is extracted verbatim into documentation,
// so it is left untouched.
private static void scalarAutocorrelationExample() {
    // Two scalar Gaussian priors; their sum is observed at 43 through a noisy Gaussian.
    DoubleVertex A = new GaussianVertex(20.0, 1.0);
    DoubleVertex B = new GaussianVertex(20.0, 1.0);
    DoubleVertex C = new GaussianVertex(A.plus(B), 1.0);
    C.observe(43.0);
    // Start the chain at the prior means.
    A.setValue(20.0);
    B.setValue(20.0);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(C.getConnectedGraph());
    //%%SNIPPET_START%% ScalarAutocorrelation
    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model).getPosteriorSamples(
        model,
        model.getLatentVariables(),
        100
    );
    DoubleTensor autocorrelation = posteriorSamples.getDoubleTensorSamples(A).getAutocorrelation();
    //%%SNIPPET_END%% ScalarAutocorrelation
}
@Test
public void samplesFromPriorWithObservedDeterministic() {
    // Observing (A or B) == false forces both A and B to be false.
    BernoulliVertex A = new BernoulliVertex(0.5);
    BernoulliVertex B = new BernoulliVertex(0.5);
    BooleanVertex C = A.or(B);
    C.observe(false);

    BayesianNetwork network = new BayesianNetwork(A.getConnectedGraph());
    network.probeForNonZeroProbability(100);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(network);

    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(A), 10000);

    // The posterior probability of A being true should be (essentially) zero.
    double probabilityATrue = posteriorSamples.get(A).probability(sample -> sample.scalar());
    assertEquals(0.0, probabilityATrue, 0.01);
}
@Test
public void doesNotStoreSamplesThatWillBeDropped() {
    int sampleCount = 1000;
    int dropCount = 100;
    int downSampleInterval = 2;

    GaussianVertex A = new GaussianVertex(0, 1);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(A.getConnectedGraph());

    NetworkSamples samples = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model)
        .generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(dropCount)
        .downSampleInterval(downSampleInterval)
        .generate(sampleCount);

    // Only (total - dropped) / interval samples should be retained.
    int expectedRetained = (sampleCount - dropCount) / downSampleInterval;
    assertEquals(expectedRetained, samples.size());
    assertEquals(0.0, samples.getDoubleTensorSamples(A).getAverages().scalar(), 0.1);
}
@Test
public void samplingWithAssertThatShouldntFire() {
    UniformVertex temperature = new UniformVertex(20., 30.);

    // These graph assertions are expected to hold for values drawn from the
    // Uniform(20, 30) prior, so sampling should complete without tripping them.
    temperature.lessThan(new ConstantDoubleVertex(30)).assertTrue();
    temperature.greaterThan(new ConstantDoubleVertex(20)).assertTrue();

    GaussianVertex firstThermometer = new GaussianVertex(temperature, 2.5);
    GaussianVertex secondThermometer = new GaussianVertex(temperature, 5.);
    firstThermometer.observe(25.);
    secondThermometer.observe(30.);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(temperature.getConnectedGraph());
    MetropolisHastings.withDefaultConfigFor(model)
        .getPosteriorSamples(model, model.getLatentVariables(), 100);
}
/**
 * Creates a {@link io.improbable.keanu.algorithms.mcmc.MetropolisHastings} sampler with the
 * default configuration for the given model, delegating to the two-argument overload using
 * the shared default {@code KeanuRandom} source.
 *
 * @param model the probabilistic model to sample from
 * @return a Metropolis-Hastings sampler configured with defaults for {@code model}
 */
public static io.improbable.keanu.algorithms.mcmc.MetropolisHastings withDefaultConfigFor(KeanuProbabilisticModel model) {
    return withDefaultConfigFor(model, KeanuRandom.getDefaultRandom());
}
@Test
public void canStreamSamples() {
    int sampleCount = 1000;
    int dropCount = 100;
    int downSampleInterval = 1;

    GaussianVertex A = new GaussianVertex(0, 1);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(A.getConnectedGraph());
    MetropolisHastings sampler = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model);

    // Consume the posterior lazily as a stream instead of materialising all samples.
    double posteriorMeanOfA = sampler.generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(dropCount)
        .downSampleInterval(downSampleInterval)
        .stream()
        .limit(sampleCount)
        .mapToDouble(state -> state.get(A).scalar())
        .average()
        .getAsDouble();

    assertEquals(0.0, posteriorMeanOfA, 0.1);
}
@Test
public void returnsLargerKLDivergenceIfTheLocationOfQIsFurtherFromP_QIsQDistribution() {
    GaussianVertex latent = new GaussianVertex(0., 1.);
    ConstantDoubleVertex offset = new ConstantDoubleVertex(0.1);
    DoubleVertex shifted = latent.plus(offset);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(shifted.getConnectedGraph());
    NetworkSamples samples = MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, Collections.singletonList(latent), 1000);

    // A Q distribution located near the sampled posterior should diverge less
    // than one located far away from it.
    QDistribution nearQ = new TestGaussianQDistribution(0.1, 1.);
    QDistribution farQ = new TestGaussianQDistribution(10.0, 1.);

    assertThat(KLDivergence.compute(nearQ, samples), lessThan(KLDivergence.compute(farQ, samples)));
}