/**
 * Sums this ndarray along the given dimension(s), writing the output into a
 * pre-allocated result array.
 *
 * @param result    destination array that receives the reduction output
 * @param dimension the dimension(s) to reduce over
 * @return the populated {@code result} array
 */
@Override
public INDArray sum(@NonNull INDArray result, int... dimension) {
    // Build the op with an explicit output buffer (y is unused for Sum).
    Sum op = new Sum(this, null, result);
    return Nd4j.getExecutioner().exec(op, dimension);
}
@Override public List<SDVariable> doDiff(List<SDVariable> i_v1) { //Out = sum(in) // dL/dIn = dL/dOut * dOut/dIn // = dL/dOut * 1 // But broadcast to shape of the input int origRank = Shape.rankFromShape(arg().getShape()); //TODO shape may not always be defined? SDVariable broadcastable = sameDiff.f().reductionBroadcastableWithOrigShape(origRank, dimensions, i_v1.get(0)); SDVariable ret = sameDiff.onesLike(arg()).mul(broadcastable); return Arrays.asList(ret); }
/**
 * Creates a sum-reduction of the given variable over the given dimensions.
 *
 * @param i_x        variable to reduce
 * @param dimensions dimension(s) to sum over
 * @return the output variable of the reduction
 */
public SDVariable sum(SDVariable i_x, int... dimensions) {
    Sum reduction = new Sum(sameDiff(), i_x, dimensions);
    return reduction.outputVariables()[0];
}
/**
 * Returns the sum of this ndarray along the specified dimension(s).
 *
 * @param dimension the dimension(s) to compute the sum along
 * @return the sum along the specified dimension(s) of this ndarray
 */
@Override
public INDArray sum(int... dimension) {
    // No explicit result buffer: the executioner allocates the output.
    return Nd4j.getExecutioner().exec(new Sum(this), dimension);
}
/**
 * Returns the sum of this ndarray along the specified dimension(s).
 *
 * @param dimension the dimension(s) to compute the sum along
 * @return the sum along the specified dimension(s) of this ndarray
 */
@Override
public INDArray sum(int... dimension) {
    Sum sumOp = new Sum(this);
    return Nd4j.getExecutioner().exec(sumOp, dimension);
}
/**
 * Sums this ndarray along the given dimension(s) into a pre-allocated array.
 *
 * @param result    destination array for the reduction output
 * @param dimension the dimension(s) to reduce over
 * @return the populated {@code result} array
 */
@Override
public INDArray sum(INDArray result, int... dimension) {
    // y is null (unused); z is the caller-supplied result buffer.
    return Nd4j.getExecutioner().exec(new Sum(this, null, result), dimension);
}
/**
 * Executes the op: computes the mean of {@code x}, then the sum of the
 * deviations (x - mean), storing the mean and the final result in fields.
 */
@Override
public void exec() {
    // Full-array mean first — the deviation step below depends on it.
    this.mean = Nd4j.getExecutioner().execAndReturn(new Mean(x)).getFinalResult().doubleValue();
    INDArray centered = x.sub(mean);
    double deviationSum =
            Nd4j.getExecutioner().execAndReturn(new Sum(centered)).getFinalResult().doubleValue();
    this.finalResult = deviationSum;
}
/**
 * Manually computes the (population) variance of {@code x} given its mean:
 * sum((x - mean)^2) / N.
 *
 * @param x    input array
 * @param mean pre-computed mean of {@code x}
 * @return the variance of the elements of {@code x}
 */
public double varManual(INDArray x, double mean) {
    INDArray xSubMean = x.sub(mean);          // fresh copy: x - mean
    INDArray squared = xSubMean.muli(xSubMean); // square in place on the copy
    double accum = Nd4j.getExecutioner().execAndReturn(new Sum(squared)).getFinalResult().doubleValue();
    // Fix: x.length() already yields the total element count; the original
    // x.ravel().length() materialized a full flattened copy just to read it.
    return accum / x.length();
}
/**
 * Executes the variance op: optionally computes the bias term, then the mean,
 * then the sum of squared deviations, and finally stores the (possibly
 * bias-corrected) result as a scalar in {@code z}.
 */
@Override
public void exec() {
    // Statement order is significant: bias (when requested) precedes the
    // mean/variance computation, and z is set from finalResult at the end.
    if (biasCorrected) {
        this.bias = Nd4j.getExecutioner().execAndReturn(new Bias(x)).getFinalResult().doubleValue();
    }
    this.mean = Nd4j.getExecutioner().execAndReturn(new Mean(x)).getFinalResult().doubleValue();
    INDArray deviations = x.sub(mean);
    INDArray squaredDeviations = deviations.muli(deviations);
    double sumOfSquares =
            Nd4j.getExecutioner().execAndReturn(new Sum(squaredDeviations)).getFinalResult().doubleValue();
    getAndSetFinalResult(sumOfSquares);
    this.z = Nd4j.scalar(this.finalResult);
}
/**
 * Computes the column-wise argmax of {@code array}, trimmed to the columns
 * whose sum is positive (i.e. columns that contain any signal).
 *
 * @param array input array
 * @return argmax per column, truncated to the valid-column count (untrimmed
 *         if no column has a positive sum)
 */
private static INDArray getIntArgMaxArray(INDArray array) {
    // Count columns with a positive sum — these are the "valid" positions.
    INDArray columnSums = Nd4j.getExecutioner().exec(new Sum(array), 0);
    int maxValidIndex = columnSums.gt(0).sumNumber().intValue();
    INDArray argMax = Nd4j.getExecutioner().exec(new IMax(array), 0);
    if (maxValidIndex <= 0) {
        return argMax;
    }
    return argMax.get(NDArrayIndex.all(), NDArrayIndex.interval(0, maxValidIndex));
}
/**
 * Computes the column-wise argmax of {@code array} as a primitive int array,
 * trimmed to the columns whose sum is positive.
 *
 * @param array input array
 * @return argmax indices per valid column (all columns if none has a
 *         positive sum)
 */
private static int[] getIntArgMaxArray(INDArray array) {
    // Valid columns are those whose column-sum is > 0.
    int maxValidIndex = Nd4j.getExecutioner().exec(new Sum(array), 0).gt(0).sumNumber().intValue();
    INDArray argMax = Nd4j.getExecutioner().exec(new IMax(array), 0);
    INDArray trimmed = (maxValidIndex > 0)
            ? argMax.get(NDArrayIndex.all(), NDArrayIndex.interval(0, maxValidIndex))
            : argMax;
    return trimmed.data().asInt();
}
}
ret = new Sum(x, y, z,x.length()); break; case "max":