/**
 * Builds an L2-norm (Euclidean norm) reduction of the given variable over the
 * given dimensions and returns the resulting variable.
 *
 * @param i_x        variable to reduce
 * @param dimensions dimensions to compute the norm over
 * @return the single output variable of the Norm2 op
 */
public SDVariable norm2(SDVariable i_x, int... dimensions) {
    Norm2 op = new Norm2(sameDiff(), i_x, dimensions);
    return op.outputVariables()[0];
}
@Override public List<SDVariable> doDiff(List<SDVariable> i_v1) { //d norm2(in)/dx = x / norm2(in) SDVariable norm2 = outputVariables()[0]; int origRank = Shape.rankFromShape(arg().getShape()); //TODO shape may not always be defined? SDVariable broadcastableNorm2 = f().reductionBroadcastableWithOrigShape(origRank, dimensions, norm2); SDVariable broadcastableGradOut = f().reductionBroadcastableWithOrigShape(origRank, dimensions, i_v1.get(0)); SDVariable ret = arg().div(broadcastableNorm2).mul(broadcastableGradOut); return Arrays.asList(ret); }
/**
 * The pass-through (no-op) form of this accumulation: the element-wise
 * absolute value of the input array.
 *
 * @return |x| element-wise
 */
@Override
public INDArray noOp() {
    INDArray input = x();
    return Transforms.abs(input);
}
/**
 * Creates a Norm2 op restricted to the tensor at {@code index} along the given
 * dimension(s).
 *
 * @param index     index of the tensor along the dimension(s)
 * @param dimension dimension(s) to slice along
 * @return a Norm2 over the selected tensor slice
 */
@Override
public Op opForDimension(int index, int... dimension) {
    // Slice once and reuse it; the original recomputed x.tensorAlongDimension(...)
    // in the no-y branch even though xAlongDimension already held that value.
    INDArray xAlongDimension = x.tensorAlongDimension(index, dimension);
    Norm2 ret;
    if (y() != null) {
        ret = new Norm2(xAlongDimension, y.tensorAlongDimension(index, dimension), xAlongDimension.length());
    } else {
        ret = new Norm2(xAlongDimension);
    }
    // Propagate whether the final transform (sqrt of the accumulated sum) is applied.
    ret.setApplyFinalTransform(applyFinalTransform());
    return ret;
}
/**
 * Returns the L2 (Euclidean) norm of this array along the specified dimension(s).
 *
 * @param dimension the dimension(s) to compute the norm2 along
 * @return the norm2 along the specified dimension(s)
 */
@Override
public INDArray norm2(int... dimension) {
    Norm2 op = new Norm2(this);
    return Nd4j.getExecutioner().exec(op, dimension);
}
/**
 * Finishes the accumulation: when the final transform is enabled, takes the
 * square root of the accumulated sum of squares, stores it as the final
 * result, and returns it; otherwise returns the raw accumulated value.
 *
 * @param accum accumulated sum of squares
 * @return the finished (or raw) result
 */
@Override
public double getAndSetFinalResult(double accum) {
    if (!applyFinalTransform()) {
        return accum;
    }
    double result = FastMath.sqrt(accum);
    this.finalResult = result;
    return result;
}
/**
 * Creates a Norm2 op restricted to the vector at {@code index} along the given
 * dimension.
 *
 * @param index     index of the vector along the dimension
 * @param dimension dimension to slice along
 * @return a Norm2 over the selected vector
 */
@Override
public Op opForDimension(int index, int dimension) {
    // Slice once and reuse it; the original recomputed x.vectorAlongDimension(...)
    // in the no-y branch even though xAlongDimension already held that value.
    INDArray xAlongDimension = x.vectorAlongDimension(index, dimension);
    Norm2 ret;
    if (y() != null) {
        ret = new Norm2(xAlongDimension, y.vectorAlongDimension(index, dimension), xAlongDimension.length());
    } else {
        ret = new Norm2(xAlongDimension);
    }
    // Propagate whether the final transform (sqrt of the accumulated sum) is applied.
    ret.setApplyFinalTransform(applyFinalTransform());
    return ret;
}
/**
 * Returns the L2 (Euclidean) norm of this array along the specified dimension(s).
 *
 * @param dimension the dimension(s) to compute the norm2 along
 * @return the norm2 along the specified dimension(s)
 */
@Override
public INDArray norm2(int... dimension) {
    Norm2 norm2Op = new Norm2(this);
    return Nd4j.getExecutioner().exec(norm2Op, dimension);
}
/**
 * Finishes the accumulation (float variant): when the final transform is
 * enabled, takes the square root of the accumulated sum of squares, stores it
 * as the final result, and returns it; otherwise returns the raw value.
 *
 * @param accum accumulated sum of squares
 * @return the finished (or raw) result
 */
@Override
public float getAndSetFinalResult(float accum) {
    if (!applyFinalTransform()) {
        return accum;
    }
    float result = (float) FastMath.sqrt(accum);
    this.finalResult = result;
    return result;
}
// Gradient normalization: rescale each per-parameter-type gradient array to unit L2 norm.
case RenormalizeL2PerParamType:
    for (INDArray g : gradient.gradientForVariable().values()) {
        // Run the Norm2 reduction over the whole array and read back the scalar result.
        double l2 = Nd4j.getExecutioner().execAndReturn(new Norm2(g)).getFinalResult().doubleValue();
        // NOTE(review): divides in place; if l2 == 0 (all-zero gradient) this yields NaNs — confirm upstream guards.
        g.divi(l2);
/**
 * Computes the finished result (float variant) without storing it: square root
 * of the accumulated sum of squares when the final transform is enabled,
 * otherwise the raw accumulated value. The element count {@code n} is unused.
 *
 * @param accum accumulated sum of squares
 * @param n     number of elements accumulated (unused)
 * @return the finished (or raw) result
 */
@Override
public float calculateFinalResult(float accum, long n) {
    return applyFinalTransform() ? (float) FastMath.sqrt(accum) : accum;
} }
/**
 * The pass-through (no-op) form of this accumulation: element-wise absolute
 * value of the input array.
 *
 * @return |x| element-wise
 */
@Override
public INDArray noOp() {
    return Transforms.abs(this.x());
}
break;
// Op-by-name dispatch: build a Norm2 accumulation reading x and y, writing into z.
case "norm2":
    ret = new Norm2(x, y, z, x.length());
    break;
case "prod":
/**
 * Computes the finished result without storing it: square root of the
 * accumulated sum of squares when the final transform is enabled, otherwise
 * the raw accumulated value. The element count {@code n} is unused.
 *
 * @param accum accumulated sum of squares
 * @param n     number of elements accumulated (unused)
 * @return the finished (or raw) result
 */
@Override
public double calculateFinalResult(double accum, long n) {
    return applyFinalTransform() ? FastMath.sqrt(accum) : accum;
}