/**
 * A down-sample interval of zero is invalid: asking the generator to stream
 * with it must raise an {@link IllegalArgumentException}.
 */
@Test(expected = IllegalArgumentException.class)
public void doesNotAllowZeroDownSample() {
    TestSamplingAlgorithm samplingAlgorithm =
        new TestSamplingAlgorithm(new AtomicInteger(0), new AtomicInteger(0));
    NetworkSamplesGenerator generator =
        new NetworkSamplesGenerator(samplingAlgorithm, StatusBar::new);

    // Should throw before any samples are produced.
    generator.downSampleInterval(0).stream();
}
@Test public void streamsExpectedNumberOfSamples() { AtomicInteger stepCount = new AtomicInteger(0); AtomicInteger sampleCount = new AtomicInteger(0); TestSamplingAlgorithm algorithm = new TestSamplingAlgorithm(stepCount, sampleCount); NetworkSamplesGenerator unitUnderTest = new NetworkSamplesGenerator(algorithm, StatusBar::new); int totalCollected = 5; int dropCount = 3; int downSampleInterval = 2; unitUnderTest.dropCount(dropCount).downSampleInterval(downSampleInterval); unitUnderTest.stream() .limit(totalCollected) .collect(Collectors.toList()); //expected step + sample count differs from generate case due to different behaviour int expectedTotal = dropCount + totalCollected * downSampleInterval; assertEquals(expectedTotal, algorithm.stepCount.get() + algorithm.sampleCount.get()); assertEquals(totalCollected, algorithm.sampleCount.get()); }
/**
 * Runs the Metropolis-Hastings algorithm and saves the resulting samples to
 * {@code results}.
 */
public void run() {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(buildBayesianNetwork());
    // Plain int: the boxed Integer bought nothing and forced an unboxing in the
    // arithmetic below.
    int numSamples = 500;
    results = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model)
        .generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(numSamples / 5)   // discard the first 20% as burn-in
        .downSampleInterval(3)       // keep every 3rd sample to reduce autocorrelation
        .generate(numSamples);
}
/**
 * Generating a fixed number of iterations should spend exactly that many
 * algorithm invocations (steps + samples), and retain one sample per
 * down-sample interval after the dropped burn-in, rounded up.
 */
@Test
public void dropsAndSamplesExpectedNumberOfStepsOnGeneration() {
    AtomicInteger steps = new AtomicInteger(0);
    AtomicInteger taken = new AtomicInteger(0);
    TestSamplingAlgorithm samplingAlgorithm = new TestSamplingAlgorithm(steps, taken);
    NetworkSamplesGenerator generator =
        new NetworkSamplesGenerator(samplingAlgorithm, StatusBar::new);

    int totalGenerated = 12;
    int dropCount = 3;
    int downSampleInterval = 2;
    generator.dropCount(dropCount).downSampleInterval(downSampleInterval);

    NetworkSamples samples = generator.generate(totalGenerated);

    // One sample kept per interval over the post-burn-in iterations, rounded up.
    int expectedCollected =
        (int) Math.ceil((totalGenerated - dropCount) / (double) downSampleInterval);
    assertEquals(totalGenerated, samplingAlgorithm.stepCount.get() + samplingAlgorithm.sampleCount.get());
    assertEquals(expectedCollected, samples.size());
}
/**
 * Estimates the probability of cheating from a privatized survey using a
 * Binomial likelihood: each student answers "yes" with probability
 * {@code p = 0.25 + 0.5 * P(cheating)}.
 *
 * @param numberOfStudents   total number of students surveyed
 * @param numberOfYesAnswers observed count of "yes" answers
 * @return the posterior mean of the probability of cheating
 */
public static double runUsingBinomial(int numberOfStudents, int numberOfYesAnswers) {
    int numberOfSamples = 100;

    UniformVertex probabilityOfCheating = new UniformVertex(0.0, 1.0);
    DoubleVertex pYesAnswer = probabilityOfCheating.times(0.5).plus(0.25);
    BinomialVertex answerTotal = new BinomialVertex(pYesAnswer, numberOfStudents);
    answerTotal.observe(numberOfYesAnswers);

    KeanuProbabilisticModel model = new KeanuProbabilisticModel(answerTotal.getConnectedGraph());

    NetworkSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .generatePosteriorSamples(model, singletonList(probabilityOfCheating))
        .dropCount(numberOfSamples / 10)                       // burn-in: first 10%
        .downSampleInterval(model.getLatentVariables().size()) // thin by latent count
        .generate(numberOfSamples);

    return posteriorSamples
        .getDoubleTensorSamples(probabilityOfCheating)
        .getAverages()
        .scalar();
}
/**
 * Streaming posterior samples for a standard Gaussian latent should yield an
 * empirical mean close to the true mean of zero.
 */
@Test
public void canStreamSamples() {
    int sampleCount = 1000;
    int dropCount = 100;
    int downSampleInterval = 1;

    GaussianVertex A = new GaussianVertex(0, 1);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(A.getConnectedGraph());
    MetropolisHastings sampler = Keanu.Sampling.MetropolisHastings.withDefaultConfigFor(model);

    double meanOfA = sampler
        .generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(dropCount)
        .downSampleInterval(downSampleInterval)
        .stream()
        .limit(sampleCount)
        .mapToDouble(state -> state.get(A).scalar())
        .average()
        .getAsDouble();

    assertEquals(0.0, meanOfA, 0.1);
}
// NOTE(review): fragment of a larger sampling chain — the enclosing method is
// not visible in this chunk. Appears to thin by the number of latent variables
// and then draw numberOfSamples iterations; confirm against the full method.
.downSampleInterval(model.getLatentVariables().size()) .generate(numberOfSamples);
// NOTE(review): fragment of a larger sampling chain — the enclosing method is
// not visible in this chunk. Appears to drop the first half as burn-in and
// thin by the latent-variable count; confirm against the full method.
.generatePosteriorSamples(model, model.getLatentVariables()) .dropCount(sampleCount / 2) .downSampleInterval(model.getLatentVariables().size()) .generate(sampleCount);
// NOTE(review): fragment of a larger sampling chain — the enclosing method is
// not visible in this chunk. Appears to drop the first half as burn-in and
// thin by the latent-variable count; confirm against the full method.
.generatePosteriorSamples(model, model.getLatentVariables()) .dropCount(numSamples / 2) .downSampleInterval(model.getLatentVariables().size()) .generate(numSamples);
/**
 * Generation with a drop count and a down-sample interval must only retain the
 * thinned, post-burn-in samples, and the retained samples should still
 * estimate the posterior mean correctly.
 */
@Test
public void doesNotStoreSamplesThatWillBeDropped() {
    int sampleCount = 1000;
    int dropCount = 100;
    int downSampleInterval = 2;

    GaussianVertex A = new GaussianVertex(0, 1);
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(A.getConnectedGraph());

    NetworkSamples posterior = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .generatePosteriorSamples(model, model.getLatentVariables())
        .dropCount(dropCount)
        .downSampleInterval(downSampleInterval)
        .generate(sampleCount);

    // Dropped and thinned-out iterations should never have been stored.
    assertEquals((sampleCount - dropCount) / downSampleInterval, posterior.size());
    assertEquals(0.0, posterior.getDoubleTensorSamples(A).getAverages().scalar(), 0.1);
}