@Override
public PartialDerivative forwardModeAutoDifferentiation(Map<Vertex, PartialDerivative> derivativeOfParentsWithRespectToInput) {
    // d(log x)/dx = 1/x, so the incoming partial is divided element-wise by the input value.
    PartialDerivative parentPartial = derivativeOfParentsWithRespectToInput.get(inputVertex);
    return parentPartial.divideByAlongOfDimensions(inputVertex.getValue());
}
@Override
public Map<Vertex, PartialDerivative> reverseModeAutoDifferentiation(PartialDerivative derivativeOfOutputWithRespectToSelf) {
    // d(log x)/dx = 1/x; propagate the downstream partial scaled by the reciprocal of the input.
    Map<Vertex, PartialDerivative> result = new HashMap<>();
    result.put(inputVertex, derivativeOfOutputWithRespectToSelf.multiplyAlongWrtDimensions(inputVertex.getValue().reciprocal()));
    return result;
}
}
@Override
public Map<Vertex, DoubleTensor> dLogProb(BooleanTensor value, Set<? extends Vertex> withRespectTo) {
    // The gradient is only defined when the probTrue parameter is itself differentiable.
    if (!probTrue.isDifferentiable()) {
        throw new UnsupportedOperationException("The probability of the Bernoulli being true must be differentiable");
    }
    // Nothing to report unless the caller asked for the derivative w.r.t. probTrue.
    if (!withRespectTo.contains(probTrue)) {
        return Collections.emptyMap();
    }
    DoubleTensor dLogPdp = Bernoulli.withParameters(probTrue.getValue()).dLogProb(value);
    return Collections.singletonMap(probTrue, dLogPdp);
}
@Override public Map<Vertex, PartialDerivative> reverseModeAutoDifferentiation(PartialDerivative derivativeOfOutputWithRespectToSelf) { DoubleTensor inputValue = inputVertex.getValue(); //dArcTandx = 1 / (1 + x^2) DoubleTensor dSelfWrtInput = inputValue.pow(2).plusInPlace(1).reciprocalInPlace(); Map<Vertex, PartialDerivative> partials = new HashMap<>(); partials.put(inputVertex, derivativeOfOutputWithRespectToSelf.multiplyAlongWrtDimensions(dSelfWrtInput)); return partials; } }
@Override
public double logProb(DoubleTensor value) {
    // Joint log probability = sum of per-element Exponential(rate) log densities.
    return Exponential.withParameters(rate.getValue()).logProb(value).sum();
}
@Override public Map<Vertex, PartialDerivative> reverseModeAutoDifferentiation(PartialDerivative derivativeOfOutputWithRespectToSelf) { DoubleTensor inputValue = inputVertex.getValue(); //dArcSindx = 1 / sqrt(1 - x^2) DoubleTensor dSelfWrtInput = inputValue.pow(2).unaryMinusInPlace().plusInPlace(1) .sqrtInPlace() .reciprocalInPlace(); Map<Vertex, PartialDerivative> partials = new HashMap<>(); partials.put(inputVertex, derivativeOfOutputWithRespectToSelf.multiplyAlongWrtDimensions(dSelfWrtInput)); return partials; } }
private KeanuSavedBayesNet.StoredValue getValue(DoubleVertex vertex) {
    // Serialize the vertex's current tensor, wrap it in a VertexValue proto, then store it.
    KeanuSavedBayesNet.VertexValue wrappedValue = KeanuSavedBayesNet.VertexValue.newBuilder()
        .setDoubleVal(getTensor(vertex.getValue()))
        .build();
    return getStoredValue(vertex, wrappedValue);
}
@Override
public PartialDerivative forwardModeAutoDifferentiation(Map<Vertex, PartialDerivative> derivativeOfParentsWithRespectToInput) {
    PartialDerivative parentPartial = derivativeOfParentsWithRespectToInput.get(inputVertex);
    DoubleTensor x = inputVertex.getValue();
    // d(arccos x)/dx = -1 / sqrt(1 - x^2), built as (-x * x + 1) -> sqrt -> reciprocal -> negate.
    DoubleTensor dArcCos = x.unaryMinus().timesInPlace(x).plusInPlace(1)
        .sqrtInPlace()
        .reciprocalInPlace()
        .unaryMinusInPlace();
    return parentPartial.multiplyAlongOfDimensions(dArcCos);
}
public static void operatesOnScalarVertexValue(double aValue, double expected, Function<DoubleVertex, DoubleVertex> op) {
    // Apply the unary op to a constant vertex and check the scalar result within tolerance.
    DoubleVertex input = ConstantVertex.of(aValue);
    double actual = op.apply(input).getValue().scalar();
    assertEquals(expected, actual, 1e-5);
}
public static void matchesKnownLogDensity(LogProbGraph logProbGraph, double expectedLogDensity) {
    // The graph output holds per-element log densities; their sum is the joint log density.
    double actualDensity = logProbGraph.getLogProbOutput().getValue().sum();
    assertEquals(expectedLogDensity, actualDensity, 1e-5);
}
@Test
public void canPerformSimpleLinearRegression() {
    RegressionModel model = linearRegression(radonData);
    // Fitted slope and intercept should fall within the expected ranges for this data set.
    double weight = model.getWeightVertex().getValue().scalar();
    double intercept = model.getInterceptVertex().getValue().scalar();
    assertThat(weight, both(greaterThan(-0.7)).and(lessThan(-0.4)));
    assertThat(intercept, both(greaterThan(1.2)).and(lessThan(1.5)));
}
@Test
public void doesLinearRegressionOnBMIAsModel() {
    Data data = csvDataResource.getData();
    // Ridge-regularized regression of the response on BMI with a wide (0, 100) prior.
    RegressionModel model = RegressionModel
        .withTrainingData(data.bmi, data.y)
        .withRegularization(RegressionRegularization.RIDGE)
        .withPriorOnWeightsAndIntercept(0, 100)
        .build();
    model.fit();
    assertThat(model.getWeightVertex().getValue().scalar(), closeTo(938.2378, 0.5));
    assertThat(model.getInterceptVertex().getValue().scalar(), closeTo(152.9189, 0.5));
}
@Test
public void canSetAndCascadeArrayOfValues() {
    DoubleVertex vertex = new GaussianVertex(0, 1);
    // Cascading a raw double[] should leave the vertex holding exactly those values.
    double[] expected = new double[]{1, 2, 3};
    vertex.setAndCascade(expected);
    assertArrayEquals(expected, vertex.getValue().asFlatDoubleArray(), 0.0);
}
@Test
public void canSetValueAsScalarOnNonScalarVertex() {
    DoubleVertex vertex = new GaussianVertex(new long[]{1, 2}, 0, 1);
    vertex.setValue(2);
    // NOTE(review): the assertion implies a scalar set collapses the value to a single element
    // rather than broadcasting over the [1, 2] shape — confirm against setValue's contract.
    assertArrayEquals(new double[]{2}, vertex.getValue().asFlatDoubleArray(), 0.0);
}
@Test
public void GIVEN_a_double_tensor_THEN_transform() {
    UniformVertex matrix = new UniformVertex(new long[]{2, 2}, 0, 5);
    matrix.setAndCascade(DoubleTensor.create(2.5, new long[]{2, 2}));
    // The lambda doubles every element; the two null args are unused here since only the
    // forward value is checked — presumably derivative calculators, TODO confirm.
    DoubleVertex doubled = matrix.lambda((val) -> val.times(2), null, null);
    assertArrayEquals(new double[]{5, 5, 5, 5}, doubled.getValue().asFlatDoubleArray(), 0.001);
}
@Test
public void canSetWithMaskGivenScalar() {
    // Mask selects elements of vertex strictly greater than 2; those are overwritten with -2.
    DoubleVertex mask = vertex.toGreaterThanMask(ConstantVertex.of(new double[]{2., 2., 2., 2.}, 2, 2));
    DoubleVertex result = new DoubleSetWithMaskVertex(vertex, mask, ConstantVertex.of(-2.));
    DoubleTensor expected = DoubleTensor.create(new double[]{1., 2., -2., -2.}, 2, 2);
    // Fix: Hamcrest's assertThat takes (actual, matcher-of-expected); the original passed the
    // expected tensor as the actual value, which inverts failure messages.
    assertThat(result.getValue(), TensorMatchers.valuesAndShapesMatch(expected));
}
private static double runGradientOptimizer(DoubleVertex temperature) { //%%SNIPPET_START%% GradientOptimizerMostProbable GradientOptimizer optimizer = KeanuOptimizer.Gradient.builderFor(temperature.getConnectedGraph()) .maxEvaluations(5000) .relativeThreshold(1e-8) .absoluteThreshold(1e-8) .build(); optimizer.maxAPosteriori(); double calculatedTemperature = temperature.getValue().scalar(); //%%SNIPPET_END%% GradientOptimizerMostProbable return calculatedTemperature; }
@Test
public void logProbGraphMatchesKnownLogDensityOfVector() {
    DoubleVertex rate = ConstantVertex.of(1.0, 1.0);
    ExponentialVertex exponentialVertex = new ExponentialVertex(rate);
    LogProbGraph logProbGraph = exponentialVertex.logProbGraph();
    LogProbGraphValueFeeder.feedValue(logProbGraph, rate, rate.getValue());
    LogProbGraphValueFeeder.feedValue(logProbGraph, exponentialVertex, DoubleTensor.create(0.25, 0.75));
    // Expected joint log density = sum of independent element-wise Exponential(1) log densities.
    ExponentialDistribution distribution = new ExponentialDistribution(1.0);
    double expectedDensity = distribution.logDensity(0.25) + distribution.logDensity(0.75);
    LogProbGraphContract.matchesKnownLogDensity(logProbGraph, expectedDensity);
}
@Test
public void canPermuteForTranpose() {
    DoubleVertex input = new UniformVertex(0, 10);
    input.setValue(DoubleTensor.create(new double[]{1, 2, 3, 4, 5, 6}, 2, 3));
    // Permuting dimensions (1, 0) of a rank-2 tensor is exactly a matrix transpose.
    PermuteVertex permuted = new PermuteVertex(input, 1, 0);
    Assert.assertArrayEquals(new long[]{3, 2}, permuted.getShape());
    Assert.assertArrayEquals(
        input.getValue().transpose().asFlatDoubleArray(),
        permuted.getValue().asFlatDoubleArray(),
        1e-6);
}