/**
 * Applies the element-wise exponential function to the given array.
 * Delegates to {@code exp(ndArray, true)}; the {@code true} flag presumably
 * requests a copy so the input is left unmodified — consistent with the
 * dup-vs-in-place convention used elsewhere in this API.
 *
 * @param ndArray the input array
 * @return a new array holding {@code e^x} for every element of the input
 */
public static INDArray exp(INDArray ndArray) { return exp(ndArray, true); }
public static void main(String[] args) { INDArray nd = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, new int[]{2, 6}); INDArray nd2 = Nd4j.create(new float[]{15,16,17,18,19,20,21,22,23,24,25,26,27,28}, new int[]{2, 7}); INDArray ndv; // a placeholder variable to print out and leave the original data unchanged //this normalizes data and helps activate artificial neurons in deep-learning nets and assigns it to var ndv ndv = sigmoid(nd); System.out.println(ndv); //this gives you absolute value ndv = abs(nd); System.out.println(ndv); //a hyperbolic function to transform data much like sigmoid. ndv = tanh(nd); System.out.println(ndv); // ndv = hardTanh(nd); // System.out.println(ndv); //exponentiation ndv = exp(nd); System.out.println(ndv); //square root ndv = sqrt(nd); System.out.println(ndv); } }
@Override
public void exec() {
    // Write sigmoid(x) into z, then scale it element-wise.
    Nd4j.getExecutioner().exec(new Sigmoid(x, z));
    // exp(-x), computed in place on the freshly negated copy (copy=false).
    INDArray expOfNegX = Transforms.exp(x.neg(), false);
    z.muli(expOfNegX).muli(y);
}
/**
 * Calculates 'phi', the Gaussian probability density function (see Bishop 23).
 *
 * @param diffSquared the squared '(x - mu)' term of the Gaussian (squared distance
 *                    between 'x' and the distribution mean).
 *                    NOTE(review): this argument is mutated in place by the
 *                    {@code divi} below — confirm callers do not reuse it.
 * @param sigma the standard deviation of the Gaussian distribution
 * @return an array of shape [nsamples, nlabels, ndistributions] containing the
 *         probability density (phi) for each sample * label * distribution
 *         for the given x, sigma, mu
 */
private INDArray phi(INDArray diffSquared, INDArray sigma) {
    // Denominator of the exponent: -(2 * sigma^2), built with in-place ops on a copy of sigma.
    INDArray minustwovariance = sigma.mul(sigma).muli(2).negi();
    // phi = exp(-(x-mu)^2 / (2*sigma^2)) / (sigma * sqrt(2*pi))^mLabelWidth
    // (the negative sign comes from dividing by the negated variance above)
    INDArray likelihoods = Transforms.exp(diffSquared.divi(minustwovariance)) .divi(Transforms.pow(sigma.mul(SQRT_TWO_PI), (double) mLabelWidth));
    return likelihoods;
}
// Element-wise exp; presumably maps a log-sigma parameterization back to sigma
// (a common trick to keep sigma positive) — NOTE(review): confirm upstream that
// mdc.sigma holds log(sigma) at this point.
mdc.sigma = Transforms.exp(mdc.sigma);
// Transpose so that subtracting broadcasts every pair of entries against each other.
final INDArray operandB = operandA.transpose();
// exp(a_i - a_j) for each pairwise difference of operandA's entries.
final INDArray pairwiseSub = Transforms.exp(operandA.sub(operandB));
// Subtract each row's max before exponentiating — the standard log-sum-exp
// stabilization that prevents overflow in exp().
INDArray exponentMax = exponent.max(1);
exponent.subiColumnVector(exponentMax);
// Unnormalized weights: exp(stabilized exponent) scaled by the Gaussian term.
INDArray pi = Transforms.exp(exponent).muli(normalPart);
// Normalize in place so each row of pi sums to 1.
INDArray piDivisor = pi.sum(1);
pi.diviColumnVector(piDivisor);
/**
 * Exponentiates this tensor element-wise, mutating the backing tensor in place.
 *
 * @return this tensor, for call chaining
 */
@Override public DoubleTensor expInPlace() {
    // copy=false: exp writes directly into the wrapped tensor; the return value is discarded.
    Transforms.exp(tensor, false);
    return this;
}
/**
 * Applies the element-wise exponential function to the given array.
 * Whether the input is copied or modified in place is governed by the global
 * {@code Nd4j.copyOnOps} setting, which is forwarded as the copy flag.
 *
 * @param ndArray the input array
 * @return the array holding {@code e^x} per element (a copy iff {@code Nd4j.copyOnOps})
 */
public static INDArray exp(INDArray ndArray) { return exp(ndArray, Nd4j.copyOnOps); }
@Override public INDArray generateAtMean(INDArray preOutDistributionParams) { //Input: gamma = log(lambda) -> lambda = exp(gamma) //Mean for exponential distribution: 1/lambda INDArray gamma = activationFn.getActivation(preOutDistributionParams.dup(), false); INDArray lambda = Transforms.exp(gamma, true); return lambda.rdivi(1.0); //mean = 1.0 / lambda }
/**
 * Gaussian kernel over the squared distance between each neighbourhood
 * feature column and the node's own feature vector: exp(-d^2 / sigma^2).
 *
 * @param neighbourhoodFeatures features of the neighbourhood nodes
 * @param nodeFeature feature row vector of the node itself
 * @return the kernel value per neighbour
 */
@Override
public INDArray op(INDArray neighbourhoodFeatures, INDArray nodeFeature) {
    final double sigma = 16;
    // Squared Euclidean distance per column.
    INDArray diff = neighbourhoodFeatures.subRowVector(nodeFeature);
    INDArray squaredDistance = Transforms.pow(diff, 2).sum(0);
    // In-place divide by -sigma^2, then exponentiate.
    squaredDistance.divi(-sigma * sigma);
    return Transforms.exp(squaredDistance);
}
@Override public double negLogProbability(INDArray x, INDArray preOutDistributionParams, boolean average) { //p(x) = lambda * exp( -lambda * x) //logp(x) = log(lambda) - lambda * x = gamma - lambda * x INDArray gamma = preOutDistributionParams.dup(); activationFn.getActivation(gamma, false); INDArray lambda = Transforms.exp(gamma, true); double negLogProbSum = -lambda.muli(x).rsubi(gamma).sumNumber().doubleValue(); if (average) { return negLogProbSum / x.size(0); } else { return negLogProbSum; } }
/**
 * Computes a Gaussian kernel row given a vector of squared distances,
 * returning its entropy together with the normalized probability row.
 *
 * @param d    vector of squared distances
 * @param beta precision parameter of the Gaussian
 * @return pair of (entropy H, normalized probabilities P)
 */
public Pair<Double, INDArray> hBeta(INDArray d, double beta) {
    // Unnormalized kernel: exp(-beta * d), built in place on a negated copy.
    INDArray probs = exp(d.neg().muli(beta));
    double total = probs.sumNumber().doubleValue();
    // H = log(sum P) + beta * sum(d * P) / sum(P)
    Double entropy = FastMath.log(total) + ((beta * (d.mul(probs).sumNumber().doubleValue())) / total);
    // Normalize in place.
    probs.divi(total);
    return new Pair<>(entropy, probs);
}
/**
 * Per-example negative log likelihood under Exp(lambda), with gamma = log(lambda)
 * taken from the activated network output: log p(x) = gamma - lambda * x,
 * summed across each row and negated.
 *
 * @param x observed values
 * @param preOutDistributionParams pre-activation distribution parameters
 * @return a column of per-example negative log probabilities
 */
@Override
public INDArray exampleNegLogProbability(INDArray x, INDArray preOutDistributionParams) {
    // Activation is applied to a copy; gamma is that activated copy.
    INDArray gamma = activationFn.getActivation(preOutDistributionParams.dup(), false);
    INDArray rate = Transforms.exp(gamma, true);
    // -(gamma - lambda * x), row-wise.
    return rate.muli(x).rsubi(gamma).sum(1).negi();
}
@Override public INDArray gradient(INDArray x, INDArray preOutDistributionParams) { //p(x) = lambda * exp( -lambda * x) //logp(x) = log(lambda) - lambda * x = gamma - lambda * x //dlogp(x)/dgamma = 1 - lambda * x (or negative of this for d(-logp(x))/dgamma INDArray gamma = activationFn.getActivation(preOutDistributionParams.dup(), true); INDArray lambda = Transforms.exp(gamma, true); INDArray dLdx = x.mul(lambda).subi(1.0); //dL/dz return activationFn.backprop(preOutDistributionParams.dup(), dLdx).getFirst(); }
@Override public INDArray generateRandom(INDArray preOutDistributionParams) { INDArray gamma = activationFn.getActivation(preOutDistributionParams.dup(), false); INDArray lambda = Transforms.exp(gamma, true); //Inverse cumulative distribution function: -log(1-p)/lambda INDArray u = Nd4j.rand(preOutDistributionParams.shape()); //Note here: if u ~ U(0,1) then 1-u ~ U(0,1) return Transforms.log(u, false).divi(lambda).negi(); }
/**
 * Calculates 'phi', the Gaussian probability density function (see Bishop 23).
 *
 * @param diffSquared the squared '(x - mu)' term of the Gaussian distribution.
 *                    NOTE(review): mutated in place by {@code divi} below —
 *                    verify callers do not rely on it afterwards.
 * @param sigma the standard deviation of the Gaussian distribution
 * @return array of shape [nsamples, nlabels, ndistributions] with the probability
 *         density (phi) for each sample * label * distribution for the given x, sigma, mu
 */
private INDArray phi(INDArray diffSquared, INDArray sigma) {
    // -(2 * sigma^2): exponent denominator, negated so the division below yields
    // the Gaussian's negative exponent.
    INDArray minustwovariance = sigma.mul(sigma).muli(2).negi();
    // phi = exp(-(x-mu)^2 / (2*sigma^2)) / (sigma * sqrt(2*pi))^mLabelWidth
    INDArray likelihoods = Transforms.exp(diffSquared.divi(minustwovariance)) .divi(Transforms.pow(sigma.mul(SQRT_TWO_PI), (double)mLabelWidth));
    return likelihoods;
}
/**
 * Gaussian kernel over the masked squared distances between every node's
 * features and its neighbourhood, one stacked row per node: exp(-d^2 / sigma^2).
 *
 * @param features node feature matrix (one row per node)
 * @param adjacencyMatrix adjacency matrix selecting each node's neighbourhood
 * @return stacked kernel values, one row per node
 */
@Override
public INDArray ndOp(INDArray features, INDArray adjacencyMatrix) {
    final double sigma = 16;
    final int nodeCount = adjacencyMatrix.rows();
    INDArray[] squaredDistances = new INDArray[nodeCount];
    for (int node = 0; node < nodeCount; node++) {
        // Column mask selecting this node's neighbours.
        INDArray mask = adjacencyMatrix.getColumn(node);
        // Tile this node's feature row and mask it down to the neighbourhood.
        INDArray tiled = features.getRow(node).repeat(0, features.rows()).muliColumnVector(mask);
        INDArray diff = tiled.sub(features.mulColumnVector(mask));
        squaredDistances[node] = Transforms.pow(diff, 2).sum(0);
    }
    // Stack, scale by -1/sigma^2 in place, and exponentiate.
    INDArray stacked = Nd4j.vstack(squaredDistances).muli(-(1d / Math.pow(sigma, 2)));
    return Transforms.exp(stacked);
}
/**
 * Computes the non-constant parts of the Gaussian log probability.
 * The activated output packs [mean | log(sigma^2)] side by side; this returns
 * log(sigma^2) together with (x - mean)^2 / (2 * sigma^2).
 *
 * @param x observed values
 * @param preOutDistributionParams pre-activation distribution parameters
 * @return {logStdevSquared, lastTerm}
 */
private INDArray[] calcLogProbArrayExConstants(INDArray x, INDArray preOutDistributionParams) {
    INDArray output = preOutDistributionParams.dup();
    activationFn.getActivation(output, false);
    // First half of the columns is the mean, second half is log(sigma^2).
    int halfWidth = output.size(1) / 2;
    INDArray mean = output.get(NDArrayIndex.all(), NDArrayIndex.interval(0, halfWidth));
    INDArray logStdevSquared = output.get(NDArrayIndex.all(), NDArrayIndex.interval(halfWidth, 2 * halfWidth));
    INDArray sigmaSquared = Transforms.exp(logStdevSquared, true);
    // (x - mean)^2 / (2 * sigma^2), built up in place on a fresh copy.
    INDArray lastTerm = x.sub(mean);
    lastTerm.muli(lastTerm);
    lastTerm.divi(sigmaSquared).divi(2);
    return new INDArray[] {logStdevSquared, lastTerm};
}
/**
 * Draws one sample per element from the Gaussian whose parameters are packed
 * in the activated output as [mean | log(sigma^2)], using the reparameterization
 * mu + sigma * N(0,1) ~ N(mu, sigma^2).
 *
 * @param preOutDistributionParams pre-activation distribution parameters
 * @return random samples with the same [rows, size] shape as the mean block
 */
@Override public INDArray generateRandom(INDArray preOutDistributionParams) {
    INDArray output = preOutDistributionParams.dup();
    // NOTE(review): training=true here, while the sibling methods in this file
    // pass false to getActivation — confirm this is intentional.
    activationFn.getActivation(output, true);
    // First half of the columns is the mean, second half is log(sigma^2).
    int size = output.size(1) / 2;
    INDArray mean = output.get(NDArrayIndex.all(), NDArrayIndex.interval(0, size));
    INDArray logStdevSquared = output.get(NDArrayIndex.all(), NDArrayIndex.interval(size, 2 * size));
    // sigma = sqrt(exp(log(sigma^2))); the sqrt is applied in place on the copy.
    INDArray sigma = Transforms.exp(logStdevSquared, true);
    Transforms.sqrt(sigma, false);
    INDArray e = Nd4j.randn(sigma.shape());
    return e.muli(sigma).addi(mean); //mu + sigma * N(0,1) ~ N(mu,sigma^2)
}