/**
 * Builds a kernel-density-estimate approximation of the posterior over {@code vertex}.
 * Runs Metropolis-Hastings (default config) over the vertex's connected graph,
 * collects {@code nSamples} posterior samples of the vertex, and fits a KDE to them.
 *
 * @param vertex   the vertex whose posterior should be approximated
 * @param nSamples number of Metropolis-Hastings samples to draw
 * @return a {@link KDEVertex} fitted to the posterior samples
 */
public static KDEVertex approximate(DoubleVertex vertex, Integer nSamples) {
    KeanuProbabilisticModel model = new KeanuProbabilisticModel(vertex.getConnectedGraph());

    DoubleVertexSamples posteriorSamples = Keanu.Sampling.MetropolisHastings
        .withDefaultConfigFor(model)
        .getPosteriorSamples(model, ImmutableList.of(vertex), nSamples)
        .getDoubleTensorSamples(vertex);

    return approximate(posteriorSamples);
}
/**
 * Builds the Student-T log-density graph for value {@code t} with {@code v} degrees of freedom:
 * lgamma((v+1)/2) - 0.5*ln(v) - 0.5*ln(pi) - lgamma(v/2) - ((v+1)/2)*ln(1 + t^2/v).
 *
 * @param t placeholder for the evaluation point
 * @param v placeholder for the (integer) degrees of freedom
 * @return vertex computing log p(t | v)
 */
public static DoubleVertex logProbOutput(DoublePlaceholderVertex t, IntegerPlaceHolderVertex v) {
    final DoubleVertex dof = v.toDouble();
    final DoubleVertex halfDofPlusHalf = dof.plus(1.).div(2.);

    // Log of the normalising constant: lgamma((v+1)/2) - 0.5*ln(v) - 0.5*ln(pi) - lgamma(v/2).
    final DoubleVertex logNormaliser = halfDofPlusHalf.logGamma()
        .minus(dof.log().div(2.))
        .minus(HALF_LOG_PI)
        .minus(dof.div(2.).logGamma());

    // Log of the kernel's magnitude: ((v+1)/2) * ln(1 + t^2/v), subtracted below.
    final DoubleVertex logKernelMagnitude = halfDofPlusHalf.times(
        t.pow(2.).div(dof).plus(1.).log()
    );

    return logNormaliser.minus(logKernelMagnitude);
}
// Fragment of a gradient-accumulation routine (enclosing method starts outside this view).
// If the differentiated vertex IS the logProb output vertex, its partial is stored directly;
// otherwise the partial is reshaped to undo broadcasting before accumulation.
// NOTE(review): `correctForScalarReverse` suggests this corrects scalar-broadcast partials —
// confirm against AutoDiffBroadcast.correctForBroadcastPartialReverse's contract.
DoubleTensor dLogProbOfWrtVertexWithDiff = dlogProbWrtVertex.getValue(); if (vertexWithDiff.equals(ofVertex)) { dOfWrtLatentsAccumulated.putWithRespectTo(vertexWithDiff.getId(), dLogProbOfWrtVertexWithDiff); } else { PartialDerivative correctForScalarReverse = AutoDiffBroadcast.correctForBroadcastPartialReverse(partialWrtVertexWithDiff, ofVertex.getShape(), vertexWithDiff.getShape());
/**
 * Builds the Chi-squared log-density graph for value {@code x} with {@code k} degrees of freedom:
 * (k/2 - 1)*ln(x) - x/2 - (k/2)*ln(2) - lgamma(k/2).
 *
 * @param x placeholder for the evaluation point
 * @param k placeholder for the (integer) degrees of freedom
 * @return vertex computing log p(x | k)
 */
public static DoubleVertex logProbOutput(DoublePlaceholderVertex x, IntegerPlaceHolderVertex k) {
    final DoubleVertex halfDof = k.toDouble().div(2.);
    // Unnormalised log density: (k/2 - 1)*ln(x) - x/2.
    final DoubleVertex unnormalisedLogPdf = halfDof.minus(1.).times(x.log()).minus(x.div(2.));
    // Log normalising constant: (k/2)*ln(2) + lgamma(k/2).
    final DoubleVertex logNormaliser = halfDof.times(LOG_TWO).plus(halfDof.logGamma());
    return unnormalisedLogPdf.minus(logNormaliser);
}
/**
 * Builds the Exponential log-density graph for value {@code x} with scale {@code lambda}:
 * -x/lambda - ln(lambda) for x >= 0, and -infinity below the support.
 *
 * @param x      placeholder for the evaluation point
 * @param lambda placeholder for the scale parameter
 * @return vertex computing log p(x | lambda)
 */
public static DoubleVertex logProbOutput(DoublePlaceholderVertex x, DoublePlaceholderVertex lambda) {
    // In-support log density: -x/lambda - ln(lambda).
    final DoubleVertex logDensity = x.unaryMinus().div(lambda).minus(lambda.log());
    // Density is zero (log-density -inf) wherever x < 0.
    return logDensity.setWithMask(x.toLessThanMask(0.), Double.NEGATIVE_INFINITY);
}
/**
 * Computes a starting NUTS/HMC step size for a single-vertex model.
 * The vertex is pinned to {@code startingValue}, the log-prob gradient is evaluated
 * at that point, and Stepsize's heuristic search is run from there.
 *
 * @param vertex        the sole latent vertex of the model
 * @param startingValue scalar value at which to evaluate the gradient
 * @return the heuristically chosen starting step size
 */
private double calculateStepsize(DoubleVertex vertex, double startingValue) {
    List<DoubleVertex> latents = Arrays.asList(vertex);
    // Build the gradient-capable model before mutating the vertex's value.
    KeanuProbabilisticModelWithGradient gradientModel =
        new KeanuProbabilisticModelWithGradient(vertex.getConnectedGraph());

    vertex.setValue(DoubleTensor.scalar(startingValue));

    Map<VariableReference, DoubleTensor> position =
        Collections.singletonMap(vertex.getId(), vertex.getValue());
    Map<? extends VariableReference, DoubleTensor> gradient = gradientModel.logProbGradients();

    return Stepsize.findStartingStepSize(
        position,
        gradient,
        Collections.singletonList(vertex),
        gradientModel,
        ProbabilityCalculator.calculateLogProbFor(latents),
        random
    );
}
/**
 * Asserts that the supplied optimizer finds the max-likelihood assignment for
 * two Gaussian latents whose sum is observed at 44: at the MLE, A + B == 44.
 *
 * @param optimizerMapper factory turning the Bayes net into the optimizer under test
 */
private void assertCanCalculateMaxLikelihood(Function<BayesianNetwork, Optimizer> optimizerMapper) {
    DoubleVertex A = new GaussianVertex(20.0, 1.0);
    DoubleVertex B = new GaussianVertex(20.0, 1.0);
    A.setValue(20.0);
    B.setAndCascade(20.0);

    DoubleVertex Cobserved = new GaussianVertex(A.plus(B), 1.0);
    Cobserved.observe(44.0);

    Optimizer optimizer = optimizerMapper.apply(new BayesianNetwork(Arrays.asList(A, B, Cobserved)));
    optimizer.maxLikelihood();

    // At the likelihood maximum the latents' sum must match the observation.
    double sumOfLatents = A.getValue().scalar() + B.getValue().scalar();
    assertEquals(44, sumOfLatents, 0.1);
}
/**
 * Checks that LogProbGradientCalculator agrees with chain-ruling reverse-mode
 * autodiff through a deep chain of tensor ops: dLogProb/dA should equal
 * (dLogProb/dH) * (dH/dA), and likewise for B.
 */
@Test
public void doesMatchReverseAutoDiffWithManyOps() {
    long[] shape = new long[]{2, 2};
    DoubleVertex A = new GaussianVertex(shape, 0, 1);
    A.setValue(DoubleTensor.linspace(0.1, 2, 4).reshape(shape));
    DoubleVertex B = new GaussianVertex(shape, 0, 1);
    B.setValue(DoubleTensor.linspace(0.2, 1, 4).reshape(shape));

    // Deliberately convoluted graph exercising many op derivatives.
    DoubleVertex D = A.atan2(B).sigmoid().times(B);
    DoubleVertex C = A.sin().cos().div(D);
    DoubleVertex E = C.times(D).pow(A).acos();
    DoubleVertex G = E.log().tan().asin().atan();
    DoubleVertex F = D.plus(B).exp();
    SumVertex H = G.plus(F).sum();

    GaussianVertex J = new GaussianVertex(H, 1);
    J.observe(0.5);

    LogProbGradientCalculator calculator =
        new LogProbGradientCalculator(ImmutableList.of(J), ImmutableList.of(A, B));
    Map<VertexId, DoubleTensor> logProbGradients = calculator.getJointLogProbGradientWrtLatents();
    DoubleTensor actualWrtA = logProbGradients.get(A.getId());
    DoubleTensor actualWrtB = logProbGradients.get(B.getId());

    // Independently differentiate H w.r.t. A and B via reverse-mode autodiff.
    PartialsOf dHByReverseAd = Differentiator.reverseModeAutoDiff(H, A, B);
    DoubleTensor dHdA = dHByReverseAd.withRespectTo(A);
    DoubleTensor dHdB = dHByReverseAd.withRespectTo(B);

    // Chain rule: dLogProb/dLatent = dLogProb/dH * dH/dLatent.
    DoubleTensor dJLogProbWrtH = J.dLogProbAtValue(H).get(H);
    assertEquals(dJLogProbWrtH.times(dHdA), actualWrtA);
    assertEquals(dJLogProbWrtH.times(dHdB), actualWrtB);
}
/**
 * Generates a linear-regression dataset with {@code featureCount} features:
 * y = X*w + b + Gaussian(0, 1) noise, for N = 1000 rows and b = 20.
 *
 * @param weightVertexFromShape factory producing the weight-generating vertex for a given shape
 * @return the sampled weights, the known intercept, and the generated X/y data
 */
static TestData generateMultiFeatureData(int featureCount, Function<long[], DoubleVertex> weightVertexFromShape) {
    final long rowCount = 1000;
    final double intercept = 20;

    DoubleVertex xGenerator = new UniformVertex(new long[]{rowCount, featureCount}, 0, 100);
    DoubleVertex weightsGenerator = weightVertexFromShape.apply(new long[]{featureCount, 1});
    DoubleVertex yGenerator = new GaussianVertex(
        new long[]{rowCount, 1},
        xGenerator.matrixMultiply(weightsGenerator).plus(intercept),
        1.0
    );

    // Fix the generators to concrete samples so y is produced from this X and w.
    DoubleTensor xData = xGenerator.sample();
    xGenerator.setValue(xData);
    DoubleTensor weights = weightsGenerator.sample();
    weightsGenerator.setValue(weights);

    return new TestData(weights, intercept, xData, yGenerator.getValue());
}
/**
 * Asserts that the optimizer can maximise likelihood through a matrix-determinant op:
 * with the determinant's Gaussian output observed around 2.2, the optimized 2x2 input
 * should end up with determinant near 2.2 regardless of the starting mu.
 *
 * @param inputGaussianMu prior mean used to initialise the 2x2 input matrix
 */
private void assertOptimizerWorksWithDeterminant(double inputGaussianMu) {
    final long[] shape = new long[]{2, 2};
    final DoubleVertex input = new GaussianVertex(shape, inputGaussianMu, 5);
    final DoubleVertex determinant = input.matrixDeterminant();
    final DoubleVertex output = new GaussianVertex(determinant, 1);
    // Two observations averaging 2.2 — NOTE(review): presumably broadcast against the
    // scalar determinant; confirm the intended observation shape.
    output.observe(new double[]{2.0, 2.4});

    final BayesianNetwork net = new BayesianNetwork(output.getConnectedGraph());
    KeanuOptimizer.of(net).maxLikelihood();

    // Fix: JUnit's assertEquals(double, double, double) is (expected, actual, delta);
    // the original passed them reversed, producing misleading failure messages.
    assertEquals(2.2, input.getValue().determinant(), 0.1);
}
/**
 * Builds the sum-of-Gaussians test fixture: two N(20, 1) latents whose sum is
 * observed at 46, then probes the network for a non-zero-probability start state.
 */
public SumGaussianTestCase() {
    A = new GaussianVertex(20.0, 1.0);
    A.setValue(20.0);
    B = new GaussianVertex(20.0, 1.0);
    B.setValue(20.0);

    DoubleVertex Cobserved = new GaussianVertex(A.plus(B), 1.0);
    Cobserved.observe(46.0);

    model = new BayesianNetwork(Arrays.asList(A, B, Cobserved));
    // Ensure the network starts from a state with non-zero probability.
    model.probeForNonZeroProbability(100);
}
/**
 * Generates a single-feature linear-regression dataset:
 * y = EXPECTED_W1 * x + EXPECTED_B + Gaussian(0, 1) noise.
 *
 * @param numSamples number of rows to generate
 * @return the known weight/intercept together with the generated x/y data
 */
static TestData generateSingleFeatureData(int numSamples) {
    DoubleVertex xGenerator = new UniformVertex(new long[]{numSamples, 1}, 0, 10);
    DoubleVertex yGenerator = new GaussianVertex(
        xGenerator.multiply(EXPECTED_W1).plus(EXPECTED_B),
        1.0
    );

    // Pin x to a concrete sample and propagate so y is drawn from this x.
    DoubleTensor xData = xGenerator.sample();
    xGenerator.setAndCascade(xData);

    return new TestData(DoubleTensor.scalar(EXPECTED_W1), EXPECTED_B, xData, yGenerator.sample());
}
// Verifies that explicitly registered per-element bounds for a latent vertex are
// passed through to Apache Math's SimpleBounds: lower {-2, -1} and upper {1, 1}
// (the scalar upper bound 1 is broadcast across both elements of A).
// B is observed, so only A is latent; D exists solely to connect A and B.
// NOTE(review): the point array has 3 elements while the sole latent A has shape {2} —
// presumably the calculator only reads as many entries as there are latent dimensions;
// confirm against ApacheMathSimpleBoundsCalculator.getBounds.
@Test public void calculatesBoundsWhenAllAreSpecified() { DoubleVertex A = new UniformVertex(new long[]{2}, -2, 1); DoubleVertex B = new UniformVertex(new long[]{2}, -2, 1); B.observe(2); DoubleVertex D = A.plus(B); OptimizerBounds bounds = new OptimizerBounds(); bounds.addBound(A.getId(), DoubleTensor.create(-2, -1), 1); ApacheMathSimpleBoundsCalculator boundsCalculator = new ApacheMathSimpleBoundsCalculator(Double.POSITIVE_INFINITY, bounds); ImmutableList<DoubleVertex> latentVertices = ImmutableList.of(A); SimpleBounds simpleBounds = boundsCalculator.getBounds(latentVertices, new double[]{0, 0, 0}); assertArrayEquals(new double[]{-2, -1}, simpleBounds.getLower(), 0.0); assertArrayEquals(new double[]{1, 1}, simpleBounds.getUpper(), 0.0); }
/**
 * Checks that DotSaver writes vertex values into the DOT output: builds a small
 * graph mixing observed, unobserved, and labelled vertices, saves it with
 * values enabled, and compares against the golden file.
 */
@Test
public void valuesAreBeingWrittenOut() throws IOException {
    DoubleVertex unobservedGaussianVertex = new GaussianVertex(0, 1);

    DoubleVertex observedGammaVertex = new GammaVertex(2, 3);
    observedGammaVertex.observe(2.5);

    DoubleVertex gammaMultipliedVertex = observedGammaVertex.times(new ConstantDoubleVertex(4));
    Vertex resultVertex = gammaMultipliedVertex.plus(unobservedGaussianVertex);
    gammaMultipliedVertex.setLabel("Gamma Multiplied");

    // Save with values enabled (second argument) and diff against the golden output.
    DotSaver dotSaver = new DotSaver(new BayesianNetwork(resultVertex.getConnectedGraph()));
    dotSaver.save(outputWriter, true);

    String expectedOutputWithValues = readFileToString(OUTPUT_WITH_VALUES_FILENAME);
    checkDotFilesMatch(outputWriter.toString(), expectedOutputWithValues);
}
// Finds the most probable temperature with Keanu's bounded non-gradient optimizer
// (temperature constrained to [-250, 250]) and returns the optimized scalar value.
// The %%SNIPPET_START%%/%%SNIPPET_END%% markers are consumed by the documentation
// build to extract this example verbatim — do not edit the region between them.
private static double runNonGradientOptimizer(DoubleVertex temperature) { //%%SNIPPET_START%% NonGradientOptimizerMostProbable OptimizerBounds temperatureBounds = new OptimizerBounds().addBound(temperature.getId(), -250., 250.0); NonGradientOptimizer optimizer = KeanuOptimizer.NonGradient.builderFor(temperature.getConnectedGraph()) .maxEvaluations(5000) .boundsRange(100000) .optimizerBounds(temperatureBounds) .initialTrustRegionRadius(5.) .stoppingTrustRegionRadius(2e-8) .build(); optimizer.maxAPosteriori(); double calculatedTemperature = temperature.getValue().scalar(); //%%SNIPPET_END%% NonGradientOptimizerMostProbable return calculatedTemperature; } }
/**
 * Checks that setAndCascade accepts a raw double[] and that the vertex's value
 * round-trips back out unchanged via asFlatDoubleArray.
 */
@Test
public void canSetAndCascadeArrayOfValues() {
    double[] expectedValues = new double[]{1, 2, 3};

    DoubleVertex gaussianVertex = new GaussianVertex(0, 1);
    gaussianVertex.setAndCascade(expectedValues);

    assertArrayEquals(expectedValues, gaussianVertex.getValue().asFlatDoubleArray(), 0.0);
}