Refine search
/**
 * Element-wise sine of the input array.
 *
 * @param in   input array
 * @param copy if true, the operation is applied to a duplicate of {@code in},
 *             leaving the input unmodified; if false, {@code in} is modified in place
 * @return array with {@code sin} applied to each element
 */
public static INDArray sin(INDArray in, boolean copy) {
    return Nd4j.getExecutioner().execAndReturn(new Sin((copy ? in.dup() : in)));
}
private INDArray copyIfNecessary(INDArray arr) { //See also: Shape.toMmulCompatible - want same conditions here and there //Check if matrix values are contiguous in memory. If not: dup //Contiguous for c if: stride[0] == shape[1] and stride[1] = 1 //Contiguous for f if: stride[0] == 1 and stride[1] == shape[0] if (arr.ordering() == 'c' && (arr.stride(0) != arr.size(1) || arr.stride(1) != 1)) return arr.dup(); else if (arr.ordering() == 'f' && (arr.stride(0) != 1 || arr.stride(1) != arr.size(0))) return arr.dup(); else if (arr.elementWiseStride() < 1) return arr.dup(); return arr; }
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } INDArray scoreArr; //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup())); INDArray output = activationFn.getActivation(preOutput.dup(), true); scoreArr = output.subi(labels); Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", scoreArr)); //Weighted loss function if (weights != null) { if (weights.length() != output.size(1)) { throw new IllegalStateException("Weights vector (length " + weights.length() + ") does not match output.size(1)=" + output.size(1)); } scoreArr.muliRowVector(weights); } if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; }
/**
 * Element-wise inverse hyperbolic tangent (atanh) of the input array.
 * (Previous Javadoc said "Sin function" — copy/paste error.)
 *
 * @param in   input array
 * @param copy if true, the operation is applied to a duplicate of {@code in},
 *             leaving the input unmodified; if false, {@code in} is modified in place
 * @return array with {@code atanh} applied to each element
 */
public static INDArray atanh(INDArray in, boolean copy) {
    return Nd4j.getExecutioner().execAndReturn(new ATanh((copy ? in.dup() : in)));
}
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } /* y_hat is -1 or 1 hinge loss is max(0,1-y_hat*y) */ INDArray output = activationFn.getActivation(preOutput.dup(), true); INDArray scoreArr = output.muli(labels); //y*yhat scoreArr.rsubi(1.0); //1 - y*yhat if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; // 1 - y*yhat }
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } INDArray scoreArr; //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup())); INDArray output = activationFn.getActivation(preOutput.dup(), true); scoreArr = output.rsubi(labels).divi(labels); Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", scoreArr)); scoreArr.muli(100.0 / labels.size(1)); //Weighted loss function if (weights != null) { if (weights.length() != output.size(1)) { throw new IllegalStateException("Weights vector (length " + weights.length() + ") does not match output.size(1)=" + output.size(1)); } scoreArr.muliRowVector(weights); } if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; }
/**
 * Element-wise cosine of the input array.
 *
 * @param in   input array
 * @param copy if true, the operation is applied to a duplicate of {@code in},
 *             leaving the input unmodified; if false, {@code in} is modified in place
 * @return array with {@code cos} applied to each element
 */
public static INDArray cos(INDArray in, boolean copy) {
    return Nd4j.getExecutioner().execAndReturn(new Cos((copy ? in.dup() : in)));
}
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } /* y_hat is -1 or 1 hinge loss is max(0,1-y_hat*y) */ //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup())); INDArray output = activationFn.getActivation(preOutput.dup(), true); INDArray scoreArr = output.muli(labels); //y*yhat scoreArr.rsubi(1.0); //1 - y*yhat if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; // 1 - y*yhat }
/**
 * Computes the gradient of the MAPE-style loss w.r.t. the pre-activation output.
 * dL/da = -100/nOut * sign(labels - output) / |labels|, then backpropagated through
 * the activation function; per-output weights and masks are applied where present.
 *
 * @param labels       ground-truth labels, shape [minibatch, nOut]
 * @param preOutput    pre-activation output of the output layer
 * @param activationFn output layer activation function
 * @param mask         optional mask array (may be null)
 * @return gradient dL/dz, same shape as preOutput
 */
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException("Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") ");
    }
    //Activation applied to a dup so preOutput is preserved for the backprop call below
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    INDArray actSubPredicted = labels.sub(output);
    //Sign op executes in place on actSubPredicted; dLda aliases the same array
    INDArray dLda = Nd4j.getExecutioner().execAndReturn(new Sign(actSubPredicted));
    //|labels|, computed on a dup so labels is not modified
    INDArray absLabels = Nd4j.getExecutioner().execAndReturn(new Abs(labels.dup()));
    //In-place: dLda = sign(labels - output) / |labels| * (-100/nOut)
    dLda.divi(absLabels).muli(-100.0 / labels.size(1));
    //Weighted loss function
    if (weights != null) {
        dLda.muliRowVector(weights);
    }
    if (mask != null && LossUtil.isPerOutputMasking(dLda, mask)) {
        //For *most* activation functions: we don't actually need to mask dL/da in addition to masking dL/dz later
        //but: some, like softmax, require both (due to dL/dz_i being a function of dL/da_j, for i != j)
        //We could add a special case for softmax (activationFn instanceof ActivationSoftmax) but that would be
        // error prone - but buy us a tiny bit of performance
        LossUtil.applyMask(dLda, mask);
    }
    INDArray gradient = activationFn.backprop(preOutput, dLda).getFirst(); //TODO activation functions with params
    if (mask != null) {
        LossUtil.applyMask(gradient, mask);
    }
    return gradient;
}
/**
 * Element-wise hyperbolic cosine of the input array.
 *
 * @param in   input array
 * @param copy if true, the operation is applied to a duplicate of {@code in},
 *             leaving the input unmodified; if false, {@code in} is modified in place
 * @return array with {@code cosh} applied to each element
 */
public static INDArray cosh(INDArray in, boolean copy) {
    return Nd4j.getExecutioner().execAndReturn(new Cosh((copy ? in.dup() : in)));
}
/**
 * Returns {@code arr} unchanged when its values are contiguous in memory, otherwise
 * returns a contiguous duplicate.
 * NOTE(review): a sibling implementation (copyIfNecessary) uses {@code elementWiseStride() < 1}
 * here rather than {@code < 0}, and omits the allowsSpecifyOrdering() guard — confirm which
 * condition is intended for this call site.
 */
private INDArray copyIfNeccessary(INDArray arr) {
    //See also: Shape.toMmulCompatible - want same conditions here and there
    //Check if matrix values are contiguous in memory. If not: dup
    //Contiguous for c if: stride[0] == shape[1] and stride[1] = 1
    //Contiguous for f if: stride[0] == 1 and stride[1] == shape[0]
    if (!Nd4j.allowsSpecifyOrdering() && arr.ordering() == 'c' && (arr.stride(0) != arr.size(1) || arr.stride(1) != 1))
        return arr.dup();
    else if (arr.ordering() == 'f' && (arr.stride(0) != 1 || arr.stride(1) != arr.size(0)))
        return arr.dup();
    else if (arr.elementWiseStride() < 0) //Negative element-wise stride: no simple linear traversal; dup
        return arr.dup();
    return arr;
}
}
/**
 * Computes the gradient of the L1 loss w.r.t. the pre-activation output.
 * dL/da = sign(output - labels), then backpropagated through the activation function;
 * per-output weights and masks are applied where present.
 *
 * @param labels       ground-truth labels, shape [minibatch, nOut]
 * @param preOutput    pre-activation output of the output layer
 * @param activationFn output layer activation function
 * @param mask         optional mask array (may be null)
 * @return gradient dL/dz, same shape as preOutput
 */
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException("Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") ");
    }
    //Activation applied to a dup so preOutput is preserved for the backprop call below
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    INDArray outSubLabels = output.sub(labels);
    //Sign op executes in place on outSubLabels; dLda aliases the same array
    INDArray dLda = Nd4j.getExecutioner().execAndReturn(new Sign(outSubLabels));
    if (weights != null) {
        dLda.muliRowVector(weights);
    }
    if (mask != null && LossUtil.isPerOutputMasking(dLda, mask)) {
        //For *most* activation functions: we don't actually need to mask dL/da in addition to masking dL/dz later
        //but: some, like softmax, require both (due to dL/dz_i being a function of dL/da_j, for i != j)
        //We could add a special case for softmax (activationFn instanceof ActivationSoftmax) but that would be
        // error prone - but buy us a tiny bit of performance
        LossUtil.applyMask(dLda, mask);
    }
    //dL/dz
    INDArray gradients = activationFn.backprop(preOutput, dLda).getFirst(); //TODO activation function param gradients
    if (mask != null) {
        LossUtil.applyMask(gradients, mask);
    }
    return gradients;
}
/**
 * Element-wise hyperbolic sine of the input array.
 *
 * @param in   input array
 * @param copy if true, the operation is applied to a duplicate of {@code in},
 *             leaving the input unmodified; if false, {@code in} is modified in place
 * @return array with {@code sinh} applied to each element
 */
public static INDArray sinh(INDArray in, boolean copy) {
    return Nd4j.getExecutioner().execAndReturn(new Sinh((copy ? in.dup() : in)));
}
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } /* mean of (yhat - y * log(yhat)) */ //INDArray postOutput = Nd4j.utioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup())); INDArray postOutput = activationFn.getActivation(preOutput.dup(), true); INDArray scoreArr = Transforms.log(postOutput); scoreArr.muli(labels); scoreArr = postOutput.sub(scoreArr); if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; }
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); if (activationFn instanceof ActivationSoftmax) { INDArray logsoftmax = Nd4j.getExecutioner().execAndReturn(new LogSoftMax(preOutput.dup())); scoreArr = logsoftmax.muli(labels); INDArray output = activationFn.getActivation(preOutput.dup(), true); if (clipEps > 0.0) { CustomOp op = DynamicCustomOp.builder("clipbyvalue") .addFloatingPointArguments(clipEps, 1.0-clipEps) .build(); Nd4j.getExecutioner().exec(op);
/**
 * Row-wise softmax of the input array.
 *
 * @param in   input array
 * @param copy if true, the operation is applied to a duplicate of {@code in},
 *             leaving the input unmodified; if false, {@code in} is modified in place
 * @return array with softmax applied
 */
public static INDArray softmax(INDArray in, boolean copy) {
    return Nd4j.getExecutioner().execAndReturn(new OldSoftMax(copy ? in.dup() : in));
}
protected INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } INDArray output = activationFn.getActivation(preOutput.dup(), true); INDArray scoreArr = output.rsubi(labels); scoreArr = scoreArr.muli(scoreArr); //Weighted loss function if (weights != null) { if (weights.length() != output.size(1)) { throw new IllegalStateException("Weights vector (length " + weights.length() + ") does not match output.size(1)=" + output.size(1)); } scoreArr.muliRowVector(weights); } //Loss function with masking if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; }
@Override public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); INDArray output = activationFn.getActivation(preOutput.dup(), true); if (clipEps > 0.0) { CustomOp op = DynamicCustomOp.builder("clipbyvalue") .addFloatingPointArguments(clipEps, 1.0-clipEps) .build(); Nd4j.getExecutioner().exec(op); INDArray denominator = Nd4j.getExecutioner().execAndReturn(new TimesOneMinus(output)); // output * (1-output) INDArray dLda = numerator.divi(denominator);
/**
 * Reverts mean-subtracted features by broadcast-adding the VGG per-channel mean
 * (BGR order) back onto {@code features}, in place, along dimension 1.
 * NOTE(review): the dup'd copy is passed as the op input and {@code features} as the
 * output buffer, so the original array receives the result — the dup itself is discarded.
 */
@Override
public void revertFeatures(INDArray features) {
    Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(features.dup(), VGG_MEAN_OFFSET_BGR, features, 1));
}
/**
 * Computes the KL-divergence score array: labels * log(labels / output), with both
 * labels and output clipped into [Nd4j.EPS_THRESHOLD, 1] to avoid log(0) / divide-by-zero.
 *
 * @param labels       ground-truth probability labels, shape [minibatch, nOut]
 * @param preOutput    pre-activation output of the output layer
 * @param activationFn output layer activation function
 * @param mask         optional mask array (may be null)
 * @return score array, same shape as the activated output
 */
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException("Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") ");
    }
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    // Clip output and labels to be between Nd4j.EPS_THRESHOLD and 1, i.e. a valid non-zero probability
    //output is a local dup, so in-place (dup=false) clipping is safe here
    output = Transforms.min(Transforms.max(output, Nd4j.EPS_THRESHOLD, false), 1, false);
    //labels must not be modified: max(..., true) copies first, then min clips the copy in place
    labels = Transforms.min(Transforms.max(labels, Nd4j.EPS_THRESHOLD, true), 1, false);
    //rdivi: output becomes labels / output, in place; log also in place (dup=false)
    INDArray logRatio = Transforms.log(output.rdivi(labels), false);

    INDArray scoreArr = logRatio.muli(labels); //labels * log(labels / output)
    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}