/**
 * Subtraction of a column vector (copy operation: unlike
 * {@link #subiColumnVector(INDArray)}, this array is not modified)
 *
 * @param columnVector the column vector to subtract
 * @return a new array holding the result of the subtraction
 */
@Override
public INDArray subColumnVector(INDArray columnVector) {
    return dup().subiColumnVector(columnVector);
}
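// Usage sketch (illustrative only; the values and the helper method below are
// assumed for the example, not part of this class). It contrasts the copy
// semantics of subColumnVector() with the in-place subiColumnVector():
public static void columnVectorExample() {
    INDArray m   = Nd4j.create(new double[][] {{1, 2}, {3, 4}});        // shape [2, 2]
    INDArray col = Nd4j.create(new double[] {1, 2}, new int[] {2, 1});  // shape [2, 1]

    INDArray out = m.subColumnVector(col); // out = [[0, 1], [1, 2]]; m unchanged
    m.subiColumnVector(col);               // m is now [[0, 1], [1, 2]]
}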
// Numerically stable mixture-weight computation: subtract the per-row
// maximum before exponentiating so exp() cannot overflow, then normalize.
INDArray exponent = labelsMinusMuSquared.div(minustwovariance);
INDArray exponentMax = exponent.max(1);
exponent.subiColumnVector(exponentMax);
INDArray pi = Transforms.exp(exponent).muli(normalPart);
INDArray piDivisor = pi.sum(1);
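// Why the row-max subtraction above is safe: a minimal sketch (the helper
// name is hypothetical). For any per-row constant m,
//   exp(e_k - m) / sum_j exp(e_j - m) == exp(e_k) / sum_j exp(e_j),
// so shifting by the row maximum leaves the normalized weights unchanged
// while keeping every argument to exp() <= 0.
public static INDArray stableExpWeights(INDArray exponent) {
    INDArray rowMax = exponent.max(1);                    // per-row max, shape [rows, 1]
    INDArray shifted = exponent.subColumnVector(rowMax);  // copy; all entries <= 0
    return Transforms.exp(shifted);                       // overflow-free exponentials
}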
/**
 * Calculate dL/dz for softmax activation function, from dL/da and a, where<br>
 * a: output activations<br>
 * dL/da: derivative of loss function with respect to the output activations<br>
 * <b>Note</b>: This version WILL modify both input arrays (for efficiency). If this is not acceptable, use
 * {@link #dLdZsoftmax(INDArray, INDArray)}.
 *
 * @param dlda derivative of loss function with respect to the output activations (shape [minibatchSize, nOut])
 * @param a    output activations array (shape [minibatchSize, nOut])
 * @deprecated No longer used
 */
@Deprecated
public static INDArray dLdZsoftmaxi(INDArray dlda, INDArray a) {
    INDArray x = a.mul(dlda).sum(1);
    return a.muli(dlda.subiColumnVector(x));
}
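// Derivation sketch for the method above. With a = softmax(z), the chain
// rule gives, per row (example):
//   dL/dz_i = a_i * (dL/da_i - sum_j a_j * dL/da_j)
// i.e. x = sum(a .* dlda) along rows, then a .* (dlda - x), which is what
// the vectorized ND4J code computes. The loop version below is an
// illustrative reference only (hypothetical helper, not library code):
public static INDArray dLdZsoftmaxReference(INDArray dlda, INDArray a) {
    INDArray out = Nd4j.zeros(a.rows(), a.columns());
    for (int r = 0; r < a.rows(); r++) {
        double x = 0.0;
        for (int j = 0; j < a.columns(); j++)
            x += a.getDouble(r, j) * dlda.getDouble(r, j);   // row-wise sum(a .* dlda)
        for (int i = 0; i < a.columns(); i++)
            out.putScalar(r, i, a.getDouble(r, i) * (dlda.getDouble(r, i) - x));
    }
    return out;
}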
// logRowSumExp holds sum_j exp(x_j - rowMax) per row; take its log in place
Nd4j.getExecutioner().exec(new Log(logRowSumExp));
// log-softmax: (x - rowMax) - log(sum_j exp(x_j - rowMax))
INDArray logsoftmax = xMinusRowMax.subiColumnVector(logRowSumExp);
if (this.z != null)
    z.assign(logsoftmax);
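// The steps above implement the numerically stable log-softmax identity:
//   logsoftmax(x)_i = (x_i - max_j x_j) - log( sum_j exp(x_j - max_j x_j) )
// Minimal end-to-end sketch, assuming one example per row (hypothetical
// helper, not part of the surrounding class):
public static INDArray logSoftmax2d(INDArray x) {
    INDArray rowMax = x.max(1);                    // shape [rows, 1]
    INDArray shifted = x.subColumnVector(rowMax);  // x - rowMax (copy)
    INDArray logSumExp = Transforms.log(Transforms.exp(shifted).sum(1));
    return shifted.subColumnVector(logSumExp);     // per-row log-softmax
}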