/**
 * Computes the per-example, per-output score contributions for this loss:
 * labels * log(activation(preOutput)), with optional per-output weights and masking.
 *
 * @param labels       expected outputs; must have the same size(1) as preOutput
 * @param preOutput    pre-activation network output
 * @param activationFn activation to apply to preOutput
 * @param mask         optional mask (may be null); masked entries are zeroed
 * @return score array, same shape as the activated output
 * @throws IllegalArgumentException if labels/preOutput column counts differ
 * @throws IllegalStateException    if the weights vector length does not match the output width
 */
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");
    }
    // dup() so the caller's preOutput is not modified by the (possibly in-place) activation
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    // For softmax outputs, clip probabilities into [eps, 1-eps] so the log below stays finite
    if (activationFn instanceof ActivationSoftmax && softmaxClipEps > 0.0) {
        BooleanIndexing.replaceWhere(output, softmaxClipEps, Conditions.lessThan(softmaxClipEps));
        BooleanIndexing.replaceWhere(output, 1.0 - softmaxClipEps, Conditions.greaterThan(1.0 - softmaxClipEps));
    }
    // log is taken in place on the dup'd output, then scaled element-wise by the labels
    INDArray scoreArr = Transforms.log(output, false).muli(labels);

    //Weighted loss function
    if (weights != null) {
        if (weights.length() != scoreArr.size(1)) {
            throw new IllegalStateException("Weights vector (length " + weights.length()
                            + ") does not match output.size(1)=" + preOutput.size(1));
        }
        // Scale each output column by its corresponding weight
        scoreArr.muliRowVector(weights);
    }

    if (mask != null) {
        // Zero out masked (e.g. padded) entries
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
public static void main(String[] args){ int nRows = 3; int nCols = 5; long rngSeed = 12345; //Generate random numbers between -1 and +1 INDArray random = Nd4j.rand(nRows, nCols, rngSeed).muli(2).subi(1); System.out.println("Array values:"); System.out.println(random); //For example, we can conditionally replace values less than 0.0 with 0.0: INDArray randomCopy = random.dup(); BooleanIndexing.replaceWhere(randomCopy, 0.0, Conditions.lessThan(0.0)); System.out.println("After conditionally replacing negative values:\n" + randomCopy); //Or conditionally replace NaN values: INDArray hasNaNs = Nd4j.create(new double[]{1.0,1.0,Double.NaN,1.0}); BooleanIndexing.replaceWhere(hasNaNs,0.0, Conditions.isNan()); System.out.println("hasNaNs after replacing NaNs with 0.0:\n" + hasNaNs); //Or we can conditionally copy values from one array to another: randomCopy = random.dup(); INDArray tens = Nd4j.valueArrayOf(nRows, nCols, 10.0); BooleanIndexing.replaceWhere(randomCopy, tens, Conditions.lessThan(0.0)); System.out.println("Conditionally copying values from array 'tens', if original value is less than 0.0\n" + randomCopy); //One simple task is to count the number of values that match the condition MatchCondition op = new MatchCondition(random, Conditions.greaterThan(0.0)); int countGreaterThanZero = Nd4j.getExecutioner().exec(op,Integer.MAX_VALUE).getInt(0); //MAX_VALUE = "along all dimensions" or equivalently "for entire array" System.out.println("Number of values matching condition 'greater than 0': " + countGreaterThanZero); }
/**
 * Element-wise logical NOT: delegates to {@code booleanOp} with an equals-zero
 * condition, so entries equal to 0 are treated as "false" and inverted.
 *
 * @param arr1 input array
 * @return result of the boolean op testing each element against 0
 */
static INDArray invert(INDArray arr1) {
    // Zero is the only "false" value, so NOT(x) is the boolean op "x == 0"
    return booleanOp(arr1, Conditions.equals(0));
}
@Override public INDArray computeScoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask); BooleanIndexing.replaceWhere(scoreArr, 0.0, Conditions.lessThan(0.0));//max(0,1-y*yhat) return scoreArr.sum(1); }
/**
 * Computes per-window Bernoulli keep-probabilities used to sub-sample the majority
 * class so that the retained labels approach {@code targetMinorityDist}.
 *
 * @param minorityLabels     labels marking minority-class positions (presumably 0/1 — TODO confirm)
 * @param labelMask          existing label mask to refine
 * @param targetMinorityDist desired fraction of minority-class examples after sampling
 * @return a mask of keep-probabilities combining sampled majority entries with all minority entries
 */
private INDArray calculateBernoulli(INDArray minorityLabels, INDArray labelMask, double targetMinorityDist) {
    INDArray minorityClass = minorityLabels.dup().muli(labelMask);
    INDArray majorityClass = Transforms.not(minorityLabels).muli(labelMask);

    //all minorityLabel class, keep masks as is
    //presence of minoriy class and donotmask minority windows set to true return label as is
    if (majorityClass.sumNumber().intValue() == 0
                    || (minorityClass.sumNumber().intValue() > 0 && donotMaskMinorityWindows))
        return labelMask;
    //all majority class and set to not mask all majority windows sample majority class by 1-targetMinorityDist
    if (minorityClass.sumNumber().intValue() == 0 && !maskAllMajorityWindows)
        return labelMask.muli(1 - targetMinorityDist);

    //Probabilities to be used for bernoulli sampling
    // Ratio of minority to majority counts per row drives the majority keep-probability
    INDArray minoritymajorityRatio = minorityClass.sum(1).div(majorityClass.sum(1));
    INDArray majorityBernoulliP = minoritymajorityRatio.muli(1 - targetMinorityDist).divi(targetMinorityDist);
    BooleanIndexing.replaceWhere(majorityBernoulliP, 1.0, Conditions.greaterThan(1.0)); //if minority ratio is already met round down to 1.0
    return majorityClass.muliColumnVector(majorityBernoulliP).addi(minorityClass);
}
/**
 * Throws if {@code z} contains any NaN values, but only when the executioner's
 * profiling mode requests NaN panics (NAN_PANIC or ANY_PANIC); otherwise a no-op.
 *
 * @param z array to check (may be a scalar)
 * @throws ND4JIllegalStateException if one or more NaN values are present
 */
public static void checkForNaN(INDArray z) {
    // Fast exit unless a panic-on-NaN profiling mode is active
    if (Nd4j.getExecutioner().getProfilingMode() != OpExecutioner.ProfilingMode.NAN_PANIC
                    && Nd4j.getExecutioner().getProfilingMode() != OpExecutioner.ProfilingMode.ANY_PANIC)
        return;

    int match = 0;
    if (!z.isScalar()) {
        // Count NaN entries across the entire array (MAX_VALUE = all dimensions)
        MatchCondition condition = new MatchCondition(z, Conditions.isNan());
        match = Nd4j.getExecutioner().exec(condition, Integer.MAX_VALUE).getInt(0);
    } else {
        // Scalar: inspect the single element directly, honouring the buffer's data type
        if (z.data().dataType() == DataBuffer.Type.DOUBLE) {
            if (Double.isNaN(z.getDouble(0)))
                match = 1;
        } else {
            if (Float.isNaN(z.getFloat(0)))
                match = 1;
        }
    }

    if (match > 0)
        // Fixed: message previously ended with a dangling ": "; now matches checkForInf's format
        throw new ND4JIllegalStateException("P.A.N.I.C.! Op.Z() contains " + match + " NaN value(s)");
}
/**
 * Writes {@code value} into this tensor, in place, at every position where {@code mask}
 * reads "true" (1.0); other positions are left unchanged.
 *
 * @param mask  mask tensor; must have the same length as this tensor
 * @param value value to write at masked positions
 * @return this tensor (mutated in place)
 * @throws IllegalArgumentException if the tensor and mask lengths differ
 */
@Override
public DoubleTensor setWithMaskInPlace(DoubleTensor mask, Double value) {
    if (this.getLength() != mask.getLength()) {
        throw new IllegalArgumentException("The lengths of the tensor and mask must match, but got tensor length: " + this.getLength() + ", mask length: " + mask.getLength());
    }
    // Work on a copy so the caller's mask is never mutated
    INDArray maskDup = unsafeGetNd4J(mask).dup();
    double trueValue = 1.0;
    if (value == 0.0) {
        // Writing 0.0 would be indistinguishable from "mask is false" in the steps below,
        // so invert the mask (1 - mask) and flip the true/false sense instead
        trueValue = 1.0 - trueValue;
        maskDup.negi().addi(1);
    }
    double falseValue = 1.0 - trueValue;
    // NOTE(review): the two CompareAndSet passes appear to first stamp `value` into the mask
    // copy wherever it equals trueValue, then copy those stamped entries into `tensor` —
    // verify exact CompareAndSet semantics against the ND4J docs
    Nd4j.getExecutioner().exec(
        new CompareAndSet(maskDup, value, Conditions.equals(trueValue))
    );
    Nd4j.getExecutioner().exec(
        new CompareAndSet(tensor, maskDup, Conditions.notEquals(falseValue))
    );
    return this;
}
private static INDArray logZ(INDArray z) { INDArray log = log(z, true); // log approaches -Infinity as z approaches zero. Replace -Infinity with the least possible value. // Caveat: does not handle +Infinity since z is assumed to be 0 <= z <= 1. switch (log.data().dataType()) { case FLOAT: BooleanIndexing.applyWhere(log, new Or(Conditions.isNan(), Conditions.isInfinite()), new StableNumber(StableNumber.Type.FLOAT)); break; case DOUBLE: BooleanIndexing.applyWhere(log, new Or(Conditions.isNan(), Conditions.isInfinite()), new StableNumber(StableNumber.Type.DOUBLE)); break; case INT: BooleanIndexing.applyWhere(log, new Or(Conditions.isNan(), Conditions.isInfinite()), new Value(-Integer.MAX_VALUE)); break; default: throw new RuntimeException("unsupported data type: " + log.data().dataType()); } return log; }
/**
 * Throws if {@code z} contains any infinite values, but only when the executioner's
 * profiling mode is INF_PANIC or ANY_PANIC; otherwise returns immediately.
 *
 * @param z array to check (may be a scalar)
 * @throws ND4JIllegalStateException if one or more infinite values are present
 */
public static void checkForInf(INDArray z) {
    OpExecutioner.ProfilingMode mode = Nd4j.getExecutioner().getProfilingMode();
    if (mode != OpExecutioner.ProfilingMode.INF_PANIC && mode != OpExecutioner.ProfilingMode.ANY_PANIC)
        return;

    int infCount;
    if (z.isScalar()) {
        // Scalar: inspect the single element directly, honouring the buffer's data type
        boolean isInf = z.data().dataType() == DataBuffer.Type.DOUBLE
                        ? Double.isInfinite(z.getDouble(0))
                        : Float.isInfinite(z.getFloat(0));
        infCount = isInf ? 1 : 0;
    } else {
        // Count infinite entries across the entire array (MAX_VALUE = all dimensions)
        MatchCondition condition = new MatchCondition(z, Conditions.isInfinite());
        infCount = Nd4j.getExecutioner().exec(condition, Integer.MAX_VALUE).getInt(0);
    }

    if (infCount > 0)
        throw new ND4JIllegalStateException("P.A.N.I.C.! Op.Z() contains " + infCount + " Inf value(s)");
}
// Conditions bracketing the current threshold: >= and <= comparisons used together.
// Note: values exactly equal to currThreshold satisfy BOTH conditions.
Condition condGeq = Conditions.greaterThanOrEqual(currThreshold);
Condition condLeq = Conditions.lessThanOrEqual(currThreshold);
/**
 * Backprop through a leaky-ReLU-style activation: dL/dz = epsilon * (1 if in > 0 else alpha).
 *
 * Bug fix: the previous code applied Conditions.lessThanOrEqual(0.0) to dLdz, which was
 * freshly created with Nd4j.ones(...) — every element was 1.0, so the condition never
 * matched and alpha was never applied (gradient was always epsilon). The condition must
 * be evaluated on the pre-activations {@code in} instead.
 *
 * @param in      pre-activation input z
 * @param epsilon upstream gradient dL/d(activation)
 * @return pair of (dL/dz, null)
 */
@Override
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
    // Start from the pre-activations so the sign of `in` drives the branch
    INDArray dLdz = in.dup();
    // Positive inputs pass the gradient through unchanged (slope 1)...
    BooleanIndexing.replaceWhere(dLdz, 1.0, Conditions.greaterThan(0.0));
    // ...non-positive inputs are scaled by alpha (order matters: >0 entries are already 1.0)
    BooleanIndexing.replaceWhere(dLdz, alpha, Conditions.lessThanOrEqual(0.0));
    dLdz.muli(epsilon);
    return new Pair<>(dLdz, null);
}
// Largest absolute value present in temp
double max = temp.amaxNumber().doubleValue();
// Count entries whose magnitude is within (max * threshold) of that maximum,
// i.e. |x| >= max - max*threshold
int cntAbs = temp.scan(Conditions.absGreaterThanOrEqual(max - (max * threshold))).intValue();
@Override public INDArray computeScoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask); BooleanIndexing.replaceWhere(scoreArr, 0.0, Conditions.lessThan(0.0));//max(0,1-y*yhat) scoreArr.muli(scoreArr); return scoreArr.sum(1); }
/**
 * Computes per-window Bernoulli keep-probabilities used to sub-sample the majority
 * class so that the retained labels approach {@code targetMinorityDist}.
 *
 * @param minorityLabels     labels marking minority-class positions (presumably 0/1 — TODO confirm)
 * @param labelMask          existing label mask to refine
 * @param targetMinorityDist desired fraction of minority-class examples after sampling
 * @return a mask of keep-probabilities combining sampled majority entries with all minority entries
 */
private INDArray calculateBernoulli(INDArray minorityLabels, INDArray labelMask, double targetMinorityDist) {
    INDArray minorityClass = minorityLabels.dup().muli(labelMask);
    INDArray majorityClass = Transforms.not(minorityLabels).muli(labelMask);

    //all minorityLabel class, keep masks as is
    //presence of minoriy class and donotmask minority windows set to true return label as is
    if (majorityClass.sumNumber().intValue() == 0
                    || (minorityClass.sumNumber().intValue() > 0 && donotMaskMinorityWindows))
        return labelMask;
    //all majority class and set to not mask all majority windows sample majority class by 1-targetMinorityDist
    if (minorityClass.sumNumber().intValue() == 0 && !maskAllMajorityWindows)
        return labelMask.muli(1-targetMinorityDist);

    //Probabilities to be used for bernoulli sampling
    // Ratio of minority to majority counts per row drives the majority keep-probability
    INDArray minoritymajorityRatio = minorityClass.sum(1).div(majorityClass.sum(1));
    INDArray majorityBernoulliP = minoritymajorityRatio.muli(1 - targetMinorityDist).divi(targetMinorityDist);
    BooleanIndexing.replaceWhere(majorityBernoulliP,1.0, Conditions.greaterThan(1.0)); //if minority ratio is already met round down to 1.0
    return majorityClass.muliColumnVector(majorityBernoulliP).addi(minorityClass);
}
/**
 * Loads the given parameter vector into the layer, recomputes gradient and score,
 * and returns the resulting score. When numerical-stability enforcement is enabled,
 * NaN parameters are first replaced with Nd4j.EPS_THRESHOLD.
 *
 * @param parameters parameter vector to evaluate (may be modified in place if NaNs are scrubbed)
 * @return the layer's score after setting the parameters
 */
public double setScoreFor(INDArray parameters) {
    if (Nd4j.ENFORCE_NUMERICAL_STABILITY) {
        // Scrub NaNs before they can poison the score computation
        BooleanIndexing.applyWhere(parameters, Conditions.isNan(), new Value(Nd4j.EPS_THRESHOLD));
    }

    layer.setParams(parameters);
    layer.computeGradientAndScore();
    double score = layer.score();
    return score;
}
/**
 * Writes {@code value} into this tensor, in place, at every position where {@code mask}
 * reads "true" (1); other positions are left unchanged.
 *
 * @param mask  mask tensor; must have the same length as this tensor
 * @param value value to write at masked positions
 * @return this tensor (mutated in place)
 * @throws IllegalArgumentException if the tensor and mask lengths differ
 */
@Override
public IntegerTensor setWithMaskInPlace(IntegerTensor mask, Integer value) {
    if (this.getLength() != mask.getLength()) {
        throw new IllegalArgumentException("The lengths of the tensor and mask must match, but got tensor length: " + this.getLength() + ", mask length: " + mask.getLength());
    }
    // Work on a copy so the caller's mask is never mutated
    INDArray maskDup = unsafeGetNd4J(mask).dup();

    if (value == 0.0) {
        // Writing zero: multiply by the inverted mask (1 - mask) so masked entries become 0
        INDArray swapOnesForZeros = maskDup.rsubi(1.0);
        tensor.muli(swapOnesForZeros);
    } else {
        // NOTE(review): the two CompareAndSet passes appear to stamp `value` into the mask
        // copy where it equals 1, then copy the non-zero entries into `tensor` — verify
        // exact CompareAndSet semantics against the ND4J docs
        Nd4j.getExecutioner().exec(
            new CompareAndSet(maskDup, value, Conditions.equals(1.0))
        );
        Nd4j.getExecutioner().exec(
            new CompareAndSet(tensor, maskDup, Conditions.notEquals(0.0))
        );
    }
    return this;
}
/**
 * Per-element Bernoulli log-likelihood: x*log(p) + (1-x)*log(1-p),
 * where p = activationFn(preOutDistributionParams).
 *
 * @param x                        observed values (expected in {0,1} for the 0*log(0) handling
 *                                 below to apply — TODO confirm against callers)
 * @param preOutDistributionParams pre-activation distribution parameters
 * @return log-probability array, same shape as x
 */
private INDArray calcLogProbArray(INDArray x, INDArray preOutDistributionParams) {
    INDArray output = preOutDistributionParams.dup();
    activationFn.getActivation(output, false);
    // log(p) is taken on a copy; log(1-p) then reuses output's buffer in place (rsubi)
    INDArray logOutput = Transforms.log(output, true);
    INDArray log1SubOut = Transforms.log(output.rsubi(1.0), false);

    //For numerical stability: if output = 0, then log(output) == -infinity
    //then x * log(output) = NaN, but lim(x->0, output->0)[ x * log(output) ] == 0
    // therefore: want 0*log(0) = 0, NOT 0*log(0) = NaN by default
    BooleanIndexing.replaceWhere(logOutput, 0.0, Conditions.isInfinite()); //log(out)= +/- inf -> x == 0.0 -> 0 * log(0) = 0
    BooleanIndexing.replaceWhere(log1SubOut, 0.0, Conditions.isInfinite()); //log(out)= +/- inf -> x == 0.0 -> 0 * log(0) = 0
    // Combine the two terms: x*log(p) + (1-x)*log(1-p)
    return logOutput.muli(x).addi(x.rsub(1.0).muli(log1SubOut));
}
/**
 * Backprop through a leaky-ReLU-style activation: dL/dz = epsilon * (1 if in > 0 else alpha).
 *
 * Bug fix: the previous code applied Conditions.lessThanOrEqual(0.0) to dLdz, which was
 * freshly created with Nd4j.ones(...) — every element was 1.0, so the condition never
 * matched and alpha was never applied (gradient was always epsilon). The condition must
 * be evaluated on the pre-activations {@code in} instead.
 *
 * @param in      pre-activation input z
 * @param epsilon upstream gradient dL/d(activation)
 * @return pair of (dL/dz, null)
 */
@Override
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
    // Start from the pre-activations so the sign of `in` drives the branch
    INDArray dLdz = in.dup();
    // Positive inputs pass the gradient through unchanged (slope 1)...
    BooleanIndexing.replaceWhere(dLdz, 1.0, Conditions.greaterThan(0.0));
    // ...non-positive inputs are scaled by alpha (order matters: >0 entries are already 1.0)
    BooleanIndexing.replaceWhere(dLdz, alpha, Conditions.lessThanOrEqual(0.0));
    dLdz.muli(epsilon);
    return new Pair<>(dLdz, null);
}