Refine search
public static void main(String[] args) { INDArray nd = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, new int[]{2, 6}); INDArray nd2 = Nd4j.create(new float[]{15,16,17,18,19,20,21,22,23,24,25,26,27,28}, new int[]{2, 7}); INDArray ndv; // a placeholder variable to print out and leave the original data unchanged //this normalizes data and helps activate artificial neurons in deep-learning nets and assigns it to var ndv ndv = sigmoid(nd); System.out.println(ndv); //this gives you absolute value ndv = abs(nd); System.out.println(ndv); //a hyperbolic function to transform data much like sigmoid. ndv = tanh(nd); System.out.println(ndv); // ndv = hardTanh(nd); // System.out.println(ndv); //exponentiation ndv = exp(nd); System.out.println(ndv); //square root ndv = sqrt(nd); System.out.println(ndv); } }
/**
 * Return a reduced basis set that covers a certain fraction of the variance of the data.
 *
 * @param variance the desired fractional variance (0 to 1); leading basis columns are
 *                 accumulated until the covered fraction first exceeds this value
 * @return the basis vectors as columns, size <i>N</i> rows by <i>ndims</i> columns,
 *         where <i>ndims</i> is less than or equal to <i>N</i>
 */
public INDArray reducedBasis(double variance) {
    // Per-dimension weights (eigenvalues^-0.5); the 'true' flag requests a copy,
    // leaving the eigenvalues field untouched.
    INDArray weights = Transforms.pow(eigenvalues, -0.5, true);
    double weightSum = weights.sumNumber().doubleValue();

    // Count how many leading columns are needed before the covered fraction exceeds the target.
    int kept = 0;
    double running = 0.0;
    while (kept < weights.columns()) {
        running += weights.getDouble(kept);
        kept++;
        if (running / weightSum > variance) {
            break;
        }
    }

    // Copy the first 'kept' eigenvector columns into the reduced basis.
    INDArray basis = Nd4j.create(eigenvectors.rows(), kept);
    for (int col = 0; col < kept; col++) {
        basis.putColumn(col, eigenvectors.getColumn(col));
    }
    return basis;
}
/**
 * Computes the per-example, per-output score (loss) array for this loss function.
 * The per-element value is log((activation(preOutput) + 1) / (labels + 1))^2 / nOut —
 * NOTE(review): this has the shape of a mean-squared-logarithmic-error style loss;
 * confirm against the owning class's documented contract.
 *
 * @param labels       ground-truth labels; must have the same size(1) as preOutput
 * @param preOutput    pre-activation output of the output layer (not modified; a dup is activated)
 * @param activationFn activation function applied to a copy of preOutput
 * @param mask         optional element-wise mask applied to the score array; may be null
 * @return per-element score array
 * @throws IllegalArgumentException if labels and preOutput column counts differ
 * @throws IllegalStateException    if the configured weights vector length does not match the output width
 */
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");
    }
    INDArray scoreArr;
    //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
    // Activate a private copy so the caller's preOutput is untouched.
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    // log((output + 1) / (labels + 1)): addi/divi mutate 'output' in place (safe — it is a copy),
    // while labels.add(1.0) returns a new array, leaving the caller's labels unchanged.
    scoreArr = Transforms.log(output.addi(1.0).divi(labels.add(1.0)), false);
    // Square the log-ratio and average over the number of outputs.
    scoreArr = scoreArr.muli(scoreArr).divi(labels.size(1));

    //Weighted loss function: scale each output column by its configured weight.
    if (weights != null) {
        if (weights.length() != output.size(1)) {
            throw new IllegalStateException("Weights vector (length " + weights.length()
                            + ") does not match output.size(1)=" + output.size(1));
        }
        scoreArr.muliRowVector(weights);
    }

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
/**
 * This method calculates 'phi', the Gaussian probability density function (see Bishop 23):
 * phi = e^(-(x-mu)^2 / (2*sigma^2)) / (sqrt(2*PI) * sigma)^mLabelWidth.
 *
 * @param diffSquared the squared '(x-mu)' term of the Gaussian distribution (squared distance
 *                    between 'x' and the distribution mean). NOTE: mutated in place by divi() —
 *                    callers must not reuse this array afterwards.
 * @param sigma       the standard deviation of the Gaussian distribution (read-only here; mul() copies).
 * @return an array of shape [nsamples, nlabels, ndistributions] containing the probability density
 *         (phi) for each sample/label/distribution — shape follows the inputs' shapes; nothing is
 *         reshaped here.
 */
private INDArray phi(INDArray diffSquared, INDArray sigma) {
    // Exponent denominator: -2 * sigma^2 (computed on a copy of sigma, then negated in place).
    // The negation makes the exponent below negative, as required by the Gaussian density
    // — the original "e^((in-u)^2/2*s^2)" comment omitted the sign.
    INDArray minustwovariance = sigma.mul(sigma).muli(2).negi();
    // This is phi_i(x,mu,sigma): exp(diffSquared / (-2*sigma^2)) normalized by
    // (sqrt(2*PI)*sigma)^mLabelWidth — presumably the product over mLabelWidth independent
    // per-label dimensions; confirm against the enclosing mixture-density code.
    INDArray likelihoods = Transforms.exp(diffSquared.divi(minustwovariance))
                    .divi(Transforms.pow(sigma.mul(SQRT_TWO_PI), (double) mLabelWidth));
    return likelihoods;
}
print("4x5 Random between zero and one", fourByFiveRandomZeroToOne); INDArray pow = pow(fourByFiveRandomZeroToOne, 5); print("5th power of array", pow); INDArray power = pow(fourByFiveRandomZeroToOne, secondArray); print("Vector power", power); INDArray sqrt = sqrt(fourByFiveRandomZeroToOne); print("Vector square root", sqrt); INDArray sin = sin(fourByFiveRandomZeroToOne); print("Vector sin", sin); INDArray log = log(fourByFiveRandomZeroToOne); print("Vector log", log); INDArray abs = abs(fourByFiveRandomZeroToOne); print("Vector abs", abs); INDArray ceil = ceil(fourByFiveRandomZeroToOne); print("Vector ceil", ceil); INDArray floor = floor(fourByFiveRandomZeroToOne); print("Vector floor", floor); INDArray round = round(fourByFiveRandomZeroToOne); print("Vector round", round);
long nSamples = labels.size(0); INDArray output = activationFn.getActivation(preOutput.dup(), false); INDArray gradient = Nd4j.zeros(nSamples, preOutput.columns()); INDArray variance = mdc.sigma.mul(mdc.sigma); INDArray minustwovariance = variance.mul(2).negi(); INDArray normalPart = mdc.alpha.div(Transforms.pow(mdc.sigma.mul(SQRT_TWO_PI), mLabelWidth)); INDArray exponent = labelsMinusMuSquared.div(minustwovariance); INDArray exponentMax = exponent.max(1); exponent.subiColumnVector(exponentMax); INDArray pi = Transforms.exp(exponent).muli(normalPart); INDArray piDivisor = pi.sum(1); pi.diviColumnVector(piDivisor); INDArray dLdZMu = Nd4j.create(nSamples, mMixtures, mLabelWidth); for (int k = 0; k < mLabelWidth; k++) { dLdZMu.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(k)},
throw new IllegalArgumentException("You have to provide at least one of scoreOutput or gradientOutput!"); if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); final Double locNormFactor = normFactor.getDouble(i); final INDArray operandA = Nd4j.ones(shape[1], shape[0]).mmul(locCfn); final INDArray operandB = operandA.transpose(); final INDArray pairwiseSub = Transforms.exp(operandA.sub(operandB)); final INDArray selection = locPositive.transpose().mmul(locNegative);
/**
 * Applies one update step to the embedding Y using the supplied gradient, with a
 * t-SNE-style adaptive-gain + momentum scheme and optional AdaGrad scaling.
 *
 * @param gradient  raw gradient with respect to Y
 * @param paramType not used by this updater
 */
@Override
public void update(INDArray gradient, String paramType) {
    INDArray yGrads = gradient;
    // Adaptive per-parameter gains.
    // NOTE(review): the classic t-SNE rule is (gains + 0.2) where sign(grad) != sign(inc)
    // and (gains * 0.8) where the signs match; here BOTH terms are gated by
    // neqi(sign(yIncs)) after a muli(sign(yGrads)) — confirm this reproduces the intended rule.
    gains = gains.add(.2).muli(sign(yGrads)).neqi(sign(yIncs))
                    .addi(gains.mul(0.8).muli(sign(yGrads)).neqi(sign(yIncs)));
    // Clamp gains from below so no parameter's effective step collapses to zero.
    BooleanIndexing.applyWhere(gains, Conditions.lessThan(minGain), new Value(minGain));
    INDArray gradChange = gains.mul(yGrads);

    if (useAdaGrad) {
        if (adaGrad == null) {
            // Lazily create AdaGrad state sized to the gradient, backed by a fresh zero view array.
            adaGrad = new AdaGrad(gradient.shape(), learningRate);
            adaGrad.setStateViewArray(Nd4j.zeros(gradient.shape()).reshape(1, gradChange.length()),
                            gradChange.shape(), gradient.ordering(), true);
        }
        gradChange = adaGrad.getGradient(gradChange, 0);
    } else {
        // Plain learning-rate scaling when AdaGrad is disabled (in place).
        gradChange.muli(learningRate);
    }
    // Momentum update: yIncs = momentum * yIncs - gradChange, then move the embedding.
    yIncs.muli(momentum).subi(gradChange);
    Y.addi(yIncs);
}
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); if (activationFn instanceof ActivationSoftmax) { INDArray logsoftmax = Nd4j.getExecutioner().execAndReturn(new LogSoftMax(preOutput.dup())); scoreArr = logsoftmax.muli(labels); .addFloatingPointArguments(clipEps, 1.0-clipEps) .build(); Nd4j.getExecutioner().exec(op); scoreArr = Transforms.log(output, true).muli(labels); INDArray secondTerm = output.rsubi(1); Transforms.log(secondTerm, false); secondTerm.muli(labels.rsub(1)); scoreArr.addi(secondTerm);
DataSet next = iterator.next(); runningTotal += next.numExamples(); batchCount = next.getFeatures().size(0); if (mean == null) { mean = next.getFeatureMatrix().mean(0); std = (batchCount == 1) ? Nd4j.zeros(mean.shape()) : Transforms.pow(next.getFeatureMatrix().std(0), 2); std.muli(batchCount); } else { INDArray meanB = next.getFeatureMatrix().mean(0); INDArray deltaSq = Transforms.pow(meanB.subRowVector(mean), 2); INDArray deltaSqScaled = deltaSq.mul(((float) runningTotal - batchCount) * batchCount / (float) runningTotal); INDArray mtwoB = Transforms.pow(next.getFeatureMatrix().std(0), 2); mtwoB.muli(batchCount); std = std.add(mtwoB); std.divi(runningTotal); std = Transforms.sqrt(std); std.addi(Nd4j.scalar(Nd4j.EPS_THRESHOLD)); if (std.min(1) == Nd4j.scalar(Nd4j.EPS_THRESHOLD)) logger.info("API_INFO: Std deviation found to be zero. Transform will round upto epsilon to avoid nans."); iterator.reset();
@Override public void applyUpdater(INDArray gradient, int iteration, int epoch) { if (lastGradient == null) throw new IllegalStateException("Updater has not been initialized with view state"); double learningRate = config.getLearningRate(iteration, epoch); double rmsDecay = config.getRmsDecay(); double epsilon = config.getEpsilon(); lastGradient.muli(rmsDecay).addi(gradient.mul(gradient).muli(1 - rmsDecay)); // lr * gradient / (sqrt(cache) + 1e-8) gradient.muli(learningRate).divi(Transforms.sqrt(lastGradient.dup(gradientReshapeOrder), false).addi(epsilon)); } }
out.putScalar(i, r.nextInt((int) shape[1]), j, 1.0); return Nd4j.createUninitialized(shape,order).assign(1.0); case BINARY: return Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.createUninitialized(shape, order), 0.5)); case INTEGER_0_10: return Transforms.floor(Nd4j.rand(shape).muli(10), false); case INTEGER_0_100: return Transforms.floor(Nd4j.rand(shape).muli(100), false); case INTEGER_0_1000: return Transforms.floor(Nd4j.rand(shape).muli(1000), false); case INTEGER_0_10000: return Transforms.floor(Nd4j.rand(shape).muli(10000), false); case INTEGER_0_100000: return Transforms.floor(Nd4j.rand(shape).muli(100000), false); default: throw new RuntimeException("Unknown enum value: " + values);
INDArray originalArray = Nd4j.linspace(1,15,15).reshape('c',3,5); //As per example 3 INDArray copyAdd = originalArray.add(1.0); System.out.println("Same object returned by add: " + (originalArray == copyAdd)); System.out.println("Original array after originalArray.add(1.0):\n" + originalArray); INDArray inPlaceAdd = originalArray.addi(1.0); System.out.println(); System.out.println("Same object returned by addi: " + (originalArray == inPlaceAdd)); //addi returns the exact same Java object originalArray = Nd4j.linspace(1,15,15).reshape('c',3,5); INDArray random = Nd4j.rand(3,5); //See example 2; we have a 3x5 with uniform random (0 to 1) values System.out.println("Element-wise tanh on random array:\n" + Transforms.tanh(random)); System.out.println("Element-wise power (x^3.0) on random array:\n" + Transforms.pow(random,3.0)); System.out.println("Element-wise scalar max (with scalar 0.5):\n" + Transforms.max(random,0.5)); INDArray sinx = Nd4j.getExecutioner().execAndReturn(new Sin(random.dup())); System.out.println("Element-wise sin(x) operation:\n" + sinx);
INDArray oneSided = Nd4j.onesLike(h); if(and(lowerBound.eq(Double.NEGATIVE_INFINITY),upperBound.eq(Double.POSITIVE_INFINITY)).sumNumber().doubleValue() > 0) { return new INDArray[] {h,oneSided}; INDArray upperBound2 = upperBound.sub(x); INDArray central = and(greaterThanOrEqual(lowerDist,hTotal),greaterThanOrEqual(upperBound2,hTotal)); INDArray forward = and(greaterThanOrEqual(upperBound,lowerDist),not(central)); hAdjusted.put(forward,min(h.get(forward),upperBound2.get(forward).mul(0.5).divi(numSteps))); oneSided.put(forward,Nd4j.scalar(1.0)); INDArray backward = and(upperBound2.lt(lowerBound),not(central)); hAdjusted.put(backward,min(h.get(backward),lowerDist.get(backward).mul(0.5).divi(numSteps))); oneSided.put(backward,Nd4j.scalar(1.0)); INDArray minDist = min(upperBound2,lowerDist).divi(numSteps); INDArray adjustedCentral = and(not(central),lessThanOrEqual(abs(hAdjusted),minDist)); hAdjusted.put(adjustedCentral,minDist.get(adjustedCentral)); oneSided.put(adjustedCentral,Nd4j.scalar(0.0));
INDArray currShape = NDArrayUtil.toNDArray(arr.shape()); INDArray startIndex = Transforms.floor(currShape.sub(shapeMatrix).divi(Nd4j.scalar(2))); INDArray endIndex = startIndex.add(shapeMatrix); INDArrayIndex[] indexes = Indices.createFromStartAndEnd(startIndex, endIndex); IComplexNDArray ret = Nd4j.createComplex(new int[] {(int) shapeMatrix.getDouble(0)}); int start = (int) startIndex.getDouble(0); int end = (int) endIndex.getDouble(0);
/**
 * Legacy AdaGrad: adapts the learning rate per parameter using the accumulated history of
 * squared gradients, scaling 'gradient' in place and returning it.
 *
 * @param gradient gradient to scale (modified in place and returned)
 * @param slice    slice index into the historical-gradient state when the state is not a vector
 * @param shape    shape used to lazily create the historical-gradient state on first call
 * @return the scaled gradient (the same object as the input)
 * @throws IllegalArgumentException if the state slice length does not match the gradient length
 */
public INDArray getGradient(INDArray gradient, int slice, int[] shape) {
    boolean historicalInitialized = false;
    INDArray sqrtHistory;

    if (this.historicalGradient == null) {
        // First call: the accumulator starts at epsilon everywhere, keeping the later
        // sqrt/division away from zero.
        this.historicalGradient = Nd4j.zeros(shape).add(epsilon);
        historicalInitialized = true;
    } else if (!this.historicalGradient.isVector()
                    && this.historicalGradient.slice(slice).length() != gradient.length())
        throw new IllegalArgumentException("Illegal gradient");

    if (historicalGradient.isVector())
        sqrtHistory = sqrt(historicalGradient);
    else
        // NOTE(review): on the very first call (historicalInitialized == true) this uses the
        // raw accumulator (== epsilon everywhere) WITHOUT sqrt; on later calls it takes
        // sqrt of the slice — confirm this asymmetry is intended.
        sqrtHistory = !historicalInitialized ? sqrt(historicalGradient.slice(slice)) : historicalGradient;

    INDArray learningRates;
    try {
        // learningRates = learningRate / sqrtHistory, element-wise, in place on sqrtHistory.
        learningRates = sqrtHistory.rdivi(learningRate);
    } catch (ArithmeticException ae) {
        // NOTE(review): floating-point division in Java does not throw ArithmeticException
        // (it yields Inf/NaN), so this fallback likely never fires — confirm before relying on it.
        learningRates = sqrtHistory.rdivi(learningRate + epsilon);
    }

    // Scale the gradient by the per-parameter rates (or by the matching slice of them).
    if (gradient.length() != learningRates.length())
        gradient.muli(learningRates.slice(slice));
    else
        gradient.muli(learningRates);

    // Accumulate the squared gradient into the history for future calls.
    // NOTE(review): textbook AdaGrad accumulates the RAW squared gradient; here the gradient
    // has already been scaled in place above — confirm this ordering is intended.
    this.historicalGradient.slice(slice).addi(gradient.mul(gradient));
    numIterations++;
    //ensure no zeros
    return gradient;
}
int[] dimensions = getDimensions(x); INDArray norm = x.norm2(dimensions); INDArray norm3 = Transforms.pow(norm, 3.0, true); Transforms.max(norm, eps, false); // in case of div/0 Transforms.max(norm3, eps, false); if (x.rank() == 2) { dLdx = epsilon.divColumnVector(norm); INDArray xDivNorm3 = x.divColumnVector(norm3); dLdx.subi(xDivNorm3.muliColumnVector(epsilon.mul(x).sum(1))); INDArray xDivNorm3 = Nd4j.createUninitialized(x.shape(), x.ordering()); Nd4j.getExecutioner().exec(new BroadcastDivOp(x, norm3, xDivNorm3, 0)); Nd4j.getExecutioner().exec(new BroadcastMulOp(xDivNorm3, dx, xDivNorm3, 0)); dLdx = Nd4j.createUninitialized(epsilon.shape(), epsilon.ordering()); Nd4j.getExecutioner().exec(new BroadcastDivOp(epsilon, norm, dLdx, 0)); dLdx.subi(xDivNorm3);
INDArray mean = data.mean(0); INDArray variance = data.var(false, 0); long count = data.size(0); INDArray deltaSquared = Transforms.pow(mean.subRowVector(runningMean), 2); INDArray mB = variance.muli(count); runningVariance.muli(runningCount).addiRowVector(mB)