/**
 * Builds a new array filled with ones whose shape matches the supplied array.
 *
 * @param arr the array providing the target shape
 * @return a ones-filled array with the same shape as {@code arr}
 */
public static INDArray onesLike(INDArray arr) {
    INDArray result = ones(arr.shape());
    return result;
}
INDArray ones = Nd4j.ones(nRows, nColumns);
public double getGradient(double gradient, int column, int[] shape) { boolean historicalInitialized = false; if (this.historicalGradient == null) { this.historicalGradient = Nd4j.ones(shape); historicalInitialized = true; } double sqrtHistory = !historicalInitialized ? Math.sqrt(historicalGradient.getDouble(column)) : historicalGradient.getDouble(column); double learningRates = learningRate / (sqrtHistory + epsilon); double adjustedGradient = gradient * (learningRates); historicalGradient.putScalar(column, historicalGradient.getDouble(column) + gradient * gradient); numIterations++; //ensure no zeros return adjustedGradient; }
System.out.println(allZeros); INDArray allOnes = Nd4j.ones(nRows, nColumns); System.out.println("\nNd4j.ones(nRows, nColumns)"); System.out.println(allOnes); INDArray threeDimArray = Nd4j.ones(3,4,5); //3x4x5 INDArray INDArray fourDimArray = Nd4j.ones(3,4,5,6); //3x4x5x6 INDArray INDArray fiveDimArray = Nd4j.ones(3,4,5,6,7); //3x4x5x6x7 INDArray System.out.println("\n\n\nCreating INDArrays with more dimensions:"); System.out.println("3d array shape: " + Arrays.toString(threeDimArray.shape()));
/**
 * Computes the loss gradient with respect to the pre-output activations.
 * Validates that labels and pre-output agree on the number of columns, then
 * delegates the actual computation to {@code calculate(...)}, which fills
 * {@code grad} in place.
 *
 * @param labels       the label array
 * @param preOutput    the network's pre-activation output
 * @param activationFn the output activation function
 * @param mask         optional mask array (may be null)
 * @return the gradient array, same shape as {@code labels}
 * @throws IllegalArgumentException if label and output column counts differ
 */
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    long labelCols = labels.size(1);
    long outputCols = preOutput.size(1);
    if (labelCols != outputCols) {
        throw new IllegalArgumentException(
                "Labels array numColumns (size(1) = " + labelCols + ") does not match output layer"
                        + " number of outputs (nOut = " + outputCols + ") ");
    }
    final INDArray grad = Nd4j.ones(labels.shape());
    calculate(labels, preOutput, activationFn, mask, null, grad);
    return grad;
}
/**
 * Backpropagates through a leaky-ReLU-style activation: the local derivative
 * is 1 where the pre-activation input is positive and {@code alpha} where it
 * is less than or equal to zero; the result is multiplied element-wise by the
 * incoming epsilon.
 *
 * @param in      pre-activation input (z)
 * @param epsilon gradient flowing in from the next layer
 * @return pair of (dL/dz, null)
 */
@Override
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
    // BUG FIX: the condition must be evaluated against the INPUT values. The
    // original created an all-ones array and applied replaceWhere to it, so the
    // "x <= 0" condition never matched and alpha was silently never applied.
    INDArray dLdz = in.dup();
    // Order matters: set positive entries to 1 first, then the remaining
    // (still <= 0) entries to alpha, so alpha > 0 is not clobbered.
    BooleanIndexing.replaceWhere(dLdz, 1.0, Conditions.greaterThan(0.0));
    BooleanIndexing.replaceWhere(dLdz, alpha, Conditions.lessThanOrEqual(0.0));
    dLdz.muli(epsilon);
    return new Pair<>(dLdz, null);
}
/**
 * Computes the loss score and gradient in a single pass.
 * Delegates to {@code calculate(...)}, which fills the per-example score
 * column vector and the gradient array in place.
 *
 * @param labels       the label array
 * @param preOutput    the network's pre-activation output
 * @param activationFn the output activation function
 * @param mask         optional mask array (may be null)
 * @param average      if true, return the mean score instead of the sum
 * @return pair of (score, gradient)
 */
@Override
public Pair<Double, INDArray> computeGradientAndScore(INDArray labels, INDArray preOutput,
        IActivation activationFn, INDArray mask, boolean average) {
    // Per-example scores are written into scoreArr; the gradient into grad.
    final INDArray scoreArr = Nd4j.create(labels.size(0), 1);
    final INDArray grad = Nd4j.ones(labels.shape());
    calculate(labels, preOutput, activationFn, mask, scoreArr, grad);

    double total = scoreArr.sumNumber().doubleValue();
    double score = average ? total / scoreArr.size(0) : total;
    return new Pair<>(score, grad);
}
public static INDArray mergePerOutputMasks2d(long[] outShape, INDArray[] arrays, INDArray[] masks) { val numExamplesPerArr = new long[arrays.length]; for (int i = 0; i < numExamplesPerArr.length; i++) { numExamplesPerArr[i] = arrays[i].size(0); } INDArray outMask = Nd4j.ones(outShape); //Initialize to 'all present' (1s) int rowsSoFar = 0; for (int i = 0; i < masks.length; i++) { long thisRows = numExamplesPerArr[i]; //Mask itself may be null -> all present, but may include multiple examples if (masks[i] == null) { continue; } outMask.put(new INDArrayIndex[] {NDArrayIndex.interval(rowsSoFar, rowsSoFar + thisRows), NDArrayIndex.all()}, masks[i]); rowsSoFar += thisRows; } return outMask; }
INDArray values = Nd4j.ones(3,4); SDVariable variable = sd.var("myVariable", values);
public AdaGrad createSubset(int index) { if (historicalGradient == null) this.historicalGradient = Nd4j.ones(shape); if (Shape.isMatrix(shape)) { AdaGrad a = new AdaGrad(1, historicalGradient.columns()); //grab only the needed elements INDArray slice = historicalGradient.slice(index).dup(); a.historicalGradient = slice; a.setLearningRate(learningRate); return a; } else { AdaGrad a = new AdaGrad(1, 1); //grab only the needed elements INDArray slice = Nd4j.scalar(historicalGradient.getDouble(index)); a.historicalGradient = slice; a.setLearningRate(learningRate); return a; } } }
/**
 * Merge the vectors and append a bias.
 * Each vector must be either row or column vectors.
 * An exception is thrown for inconsistency (mixed row and column vectors)
 *
 * @param vectors the vectors to merge
 * @return the merged ndarray appended with the bias
 */
@Override
public INDArray appendBias(INDArray... vectors) {
    // Total row count across all input vectors
    int size = 0;
    for (INDArray vector : vectors) {
        size += vector.rows();
    }
    // NOTE(review): only ONE extra row is allocated for the bias in total, yet the
    // loop below writes (vector.rows() + 1) rows per vector while advancing the
    // index by only vector.rows(). For a single input vector this appends a 1.0
    // bias row at the bottom; for multiple vectors each iteration's bias entry is
    // overwritten by the first row of the next vector — confirm this is intended.
    INDArray result = Nd4j.create(size + 1, vectors[0].columns());
    int index = 0;
    for (INDArray vector : vectors) {
        // Flatten the vector and append a single 1.0 (the bias entry)
        INDArray put = toFlattened(vector, Nd4j.ones(1));
        result.put(new INDArrayIndex[] {NDArrayIndex.interval(index, index + vector.rows() + 1),
                NDArrayIndex.interval(0, vectors[0].columns())}, put);
        index += vector.rows();
    }
    return result;
}
INDArray values = Nd4j.ones(3,4); var3.setArray(values);
public INDArray adjustMasks(INDArray label, INDArray labelMask, int minorityLabel, double targetDist) { labelMask = Nd4j.ones(label.size(0), label.size(2));
final Double locNormFactor = normFactor.getDouble(i); final INDArray operandA = Nd4j.ones(shape[1], shape[0]).mmul(locCfn); final INDArray operandB = operandA.transpose();
INDArray mask = (needMask && maskRank != 3 ? Nd4j.ones(totalExamples, maxLength) : null);
print("One dimensional zeros", oneDZeros); INDArray threeByFourOnes = Nd4j.ones(3, 4); print("3x4 ones", threeByFourOnes);
/**
 * Creates a ones-filled array shaped like the given array.
 *
 * @param arr array whose shape the result should mirror
 * @return a new array of ones with the same shape as {@code arr}
 */
public static INDArray onesLike(INDArray arr) {
    // Delegate to ones(...) with the source array's shape
    INDArray onesArr = ones(arr.shape());
    return onesArr;
}
/**
 * Builds row {@code k} of the log-transition matrix: every entry holds the
 * log diagonal-transition probability except entry {@code k}, which is set
 * to the meta-instability log-probability.
 *
 * @param k the state index whose entry is overridden
 * @return a 1 x states row vector
 */
private INDArray rowOfLogTransitionMatrix(int k) {
    // Fill the whole row with the shared log-probability first...
    INDArray row = Nd4j.ones(1, states);
    row.muli(logOfDiangnalTProb);
    // ...then override the single entry for state k
    row.putScalar(k, logMetaInstability);
    return row;
}
/**
 * Backpropagates through a leaky-ReLU-style activation: the local derivative
 * is 1 where the pre-activation input is positive and {@code alpha} where it
 * is less than or equal to zero; the result is multiplied element-wise by the
 * incoming epsilon.
 *
 * @param in      pre-activation input (z)
 * @param epsilon gradient flowing in from the next layer
 * @return pair of (dL/dz, null)
 */
@Override
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
    // BUG FIX: the condition must be evaluated against the INPUT values. The
    // original created an all-ones array and applied replaceWhere to it, so the
    // "x <= 0" condition never matched and alpha was silently never applied.
    INDArray dLdz = in.dup();
    // Order matters: set positive entries to 1 first, then the remaining
    // (still <= 0) entries to alpha, so alpha > 0 is not clobbered.
    BooleanIndexing.replaceWhere(dLdz, 1.0, Conditions.greaterThan(0.0));
    BooleanIndexing.replaceWhere(dLdz, alpha, Conditions.lessThanOrEqual(0.0));
    dLdz.muli(epsilon);
    return new Pair<>(dLdz, null);
}