/**
 * Total (or mean) loss over the minibatch: sums the per-example score array.
 *
 * @param average if true, divide the summed score by the minibatch size (size(0))
 * @return the summed (or averaged) score
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray perExample = computeScoreArray(labels, preOutput, activationFn, mask);
    double total = perExample.sumNumber().doubleValue();
    if (average) {
        total /= perExample.size(0);
    }
    return total;
}
/**
 * Computes the overall score as the sum of the per-example score array,
 * optionally averaged over the minibatch (first dimension).
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray scorePerExample = scoreArray(labels, preOutput, activationFn, mask);
    double result = scorePerExample.sumNumber().doubleValue();
    if (average) {
        result /= scorePerExample.size(0);
    }
    return result;
}
/**
 * Sums the per-example scores into a single loss value; when {@code average}
 * is set, the sum is divided by the number of examples (size(0)).
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray perExampleScores = scoreArray(labels, preOutput, activationFn, mask);
    final double total = perExampleScores.sumNumber().doubleValue();
    return average ? total / perExampleScores.size(0) : total;
}
/**
 * Overall loss for the minibatch: sum of the score array, averaged over
 * examples when {@code average} is true.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray perExample = scoreArray(labels, preOutput, activationFn, mask);
    double score = perExample.sumNumber().doubleValue();
    if (average) {
        score /= perExample.size(0);
    }
    return score;
}
/**
 * Reduces the per-example score array to a scalar loss (sum, or mean over
 * the minibatch dimension when {@code average} is set).
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray scores = scoreArray(labels, preOutput, activationFn, mask);
    final double summed = scores.sumNumber().doubleValue();
    return average ? summed / scores.size(0) : summed;
}
/**
 * Overall loss is the NEGATED sum of the per-example score array (i.e. this
 * loss maximizes the score array's sum), optionally averaged over examples.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray perExample = scoreArray(labels, preOutput, activationFn, mask);
    // Negation: the per-example array holds values whose sum must be negated to form the loss.
    double total = -perExample.sumNumber().doubleValue();
    if (average) {
        total /= perExample.size(0);
    }
    return total;
}
/**
 * Scalar score for the minibatch: sum of the per-example score array,
 * divided by the example count when {@code average} is requested.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray arr = scoreArray(labels, preOutput, activationFn, mask);
    double s = arr.sumNumber().doubleValue();
    if (average) {
        s /= arr.size(0);
    }
    return s;
}
/**
 * Sums the per-example scores; returns the mean over the minibatch instead
 * when {@code average} is true.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray exampleScores = scoreArray(labels, preOutput, activationFn, mask);
    final double sum = exampleScores.sumNumber().doubleValue();
    return average ? sum / exampleScores.size(0) : sum;
}
/**
 * Aggregates the per-example score array to a single loss value: the sum,
 * or the minibatch mean when {@code average} is set.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray perExample = scoreArray(labels, preOutput, activationFn, mask);
    double result = perExample.sumNumber().doubleValue();
    if (average) {
        result /= perExample.size(0);
    }
    return result;
}
/**
 * Computes the total loss by summing the score array; averages over the
 * first (minibatch) dimension when requested.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray scores = scoreArray(labels, preOutput, activationFn, mask);
    double loss = scores.sumNumber().doubleValue();
    if (average) {
        loss /= scores.size(0);
    }
    return loss;
}
/**
 * Scalar loss over the minibatch: sum of per-example scores, or their mean
 * when {@code average} is true.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray scoreByExample = scoreArray(labels, preOutput, activationFn, mask);
    final double summed = scoreByExample.sumNumber().doubleValue();
    return average ? summed / scoreByExample.size(0) : summed;
}
/**
 * Loss is the NEGATED sum of the per-example score array, averaged over the
 * minibatch when {@code average} is set.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray scores = scoreArray(labels, preOutput, activationFn, mask);
    // The sum is negated: larger score-array values mean lower loss here.
    double loss = -scores.sumNumber().doubleValue();
    if (average) {
        loss /= scores.size(0);
    }
    return loss;
}
/**
 * Sums the per-example score array (via {@code computeScoreArray}) into a
 * scalar; divides by the example count when {@code average} is true.
 */
@Override
public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    final INDArray perExample = computeScoreArray(labels, preOutput, activationFn, mask);
    final double total = perExample.sumNumber().doubleValue();
    return average ? total / perExample.size(0) : total;
}
/** * @param lower row vector of lower bounds * @param upper row vector of upper bounds */ public MinMaxStats(@NonNull INDArray lower, @NonNull INDArray upper) { // Check for 0 differences and round up to epsilon INDArray diff = upper.sub(lower); INDArray addedPadding = Transforms.max(diff, Nd4j.EPS_THRESHOLD).subi(diff); // If any entry in `addedPadding` is not 0, then we had to add something to prevent 0 difference, Add this same // value to the upper bounds to actually apply the padding, and log about it if (addedPadding.sumNumber().doubleValue() > 0) { log.info("API_INFO: max val minus min val found to be zero. Transform will round up to epsilon to avoid nans."); upper.addi(addedPadding); } this.lower = lower; this.upper = upper; }
/** * Computes the aggregate score as a sum of all of the individual scores of * each of the labels against each of the outputs of the network. For * the mixture density network, this is the negative log likelihood that * the given labels fall within the probability distribution described by * the mixture of gaussians of the network output. * @param labels Labels to score against the network. * @param preOutput Output of the network (before activation function has been called). * @param activationFn Activation function for the network. * @param mask Mask to be applied to labels (not used for MDN). * @param average Whether or not to return an average instead of a total score (not used). * @return Returns a single double which corresponds to the total score of all label values. */ @Override public double computeScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) { // The score overall consists of the // sum of the negative log likelihoods for each // of the individual labels. INDArray scoreArr = computeScoreArray(labels, preOutput, activationFn, mask); double score = scoreArr.sumNumber().doubleValue(); if (average) { score /= scoreArr.size(0); } return score; }
/**
 * Estimate the variance of a single record with reduced # of dimensions.
 * @param data A single record with the same <i>N</i> features as the constructing data set
 * @param ndims The number of dimensions to include in calculation
 * @return The fraction (0 to 1) of the total variance covered by the <i>ndims</i> basis set.
 */
public double estimateVariance(INDArray data, int ndims) {
    // Project the mean-centered record onto the eigenbasis.
    final INDArray centered = data.sub(mean);
    final INDArray projection = eigenvectors.transpose().mmul(centered.reshape(centered.columns(), 1));
    // Squared projection magnitudes; ratio of the first ndims components to the total.
    final INDArray squared = Transforms.pow(projection, 2);
    final double covered = squared.get(NDArrayIndex.interval(0, ndims)).sumNumber().doubleValue();
    final double total = squared.sumNumber().doubleValue();
    return covered / total;
}
private INDArray calculateBernoulli(INDArray minorityLabels, INDArray labelMask, double targetMinorityDist) { INDArray minorityClass = minorityLabels.dup().muli(labelMask); INDArray majorityClass = Transforms.not(minorityLabels).muli(labelMask); //all minorityLabel class, keep masks as is //presence of minoriy class and donotmask minority windows set to true return label as is if (majorityClass.sumNumber().intValue() == 0 || (minorityClass.sumNumber().intValue() > 0 && donotMaskMinorityWindows)) return labelMask; //all majority class and set to not mask all majority windows sample majority class by 1-targetMinorityDist if (minorityClass.sumNumber().intValue() == 0 && !maskAllMajorityWindows) return labelMask.muli(1 - targetMinorityDist); //Probabilities to be used for bernoulli sampling INDArray minoritymajorityRatio = minorityClass.sum(1).div(majorityClass.sum(1)); INDArray majorityBernoulliP = minoritymajorityRatio.muli(1 - targetMinorityDist).divi(targetMinorityDist); BooleanIndexing.replaceWhere(majorityBernoulliP, 1.0, Conditions.greaterThan(1.0)); //if minority ratio is already met round down to 1.0 return majorityClass.muliColumnVector(majorityBernoulliP).addi(minorityClass); }
/**
 * Computes the scalar score and the gradient in a single pass.
 * {@code calculate(...)} fills the pre-allocated per-example score array and
 * gradient array in place; the score is then reduced to a sum (or minibatch mean).
 */
@Override
public Pair<Double, INDArray> computeGradientAndScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    // Outputs populated in place by calculate(...).
    final INDArray scorePerExample = Nd4j.create(labels.size(0), 1);
    final INDArray gradient = Nd4j.ones(labels.shape());
    calculate(labels, preOutput, activationFn, mask, scorePerExample, gradient);

    double score = scorePerExample.sumNumber().doubleValue();
    if (average) {
        score /= scorePerExample.size(0);
    }
    return new Pair<>(score, gradient);
}
/**
 * Return a reduced basis set that covers a certain fraction of the variance of the data
 * @param variance The desired fractional variance (0 to 1), it will always be greater than the value.
 * @return The basis vectors as columns, size <i>N</i> rows by <i>ndims</i> columns, where <i>ndims</i> is less than or equal to <i>N</i>
 */
public INDArray reducedBasis(double variance) {
    // NOTE(review): components are weighted by eigenvalue^-0.5, not by the raw
    // eigenvalue — confirm this matches how `eigenvalues` is stored/scaled upstream.
    final INDArray weights = Transforms.pow(eigenvalues, -0.5, true);
    final double grandTotal = weights.sumNumber().doubleValue();

    // Take components until the cumulative weight fraction exceeds `variance`.
    int ndims = 0;
    double running = 0.0;
    while (ndims < weights.columns()) {
        running += weights.getDouble(ndims);
        ndims++;
        if (running / grandTotal > variance)
            break;
    }

    // Copy the first ndims eigenvectors into the reduced basis (as columns).
    final INDArray basis = Nd4j.create(eigenvectors.rows(), ndims);
    for (int j = 0; j < ndims; j++)
        basis.putColumn(j, eigenvectors.getColumn(j));
    return basis;
}
/**
 * Returns the covariance matrix of a data set of many records, each with N features.
 * Also returns the per-feature mean, since all modes are centered around the mean:
 * element (i, j) is the average of dx_i * dx_j over all records.
 *
 * @param in A matrix of vectors of fixed length N (N features) on each row
 * @return INDArray[2]: element 0 is the N x N covariance matrix, element 1 is the feature means.
 */
public static INDArray[] covarianceMatrix(INDArray in) {
    final long numRecords = in.rows();
    final long numFeatures = in.columns();

    // Column-wise mean of the input (the "average values" of the contract).
    final INDArray mean = Nd4j.create(numFeatures);
    for (int j = 0; j < numFeatures; j++)
        mean.getColumn(j).assign(in.getColumn(j).sumNumber().doubleValue() / numRecords);

    // Accumulate the outer products of the mean-centered rows: sum(dx * dx^T).
    final INDArray cov = Nd4j.create(numFeatures, numFeatures);
    for (int r = 0; r < numRecords; r++) {
        final INDArray dx = in.getRow(r).sub(mean);
        cov.addi(dx.reshape(numFeatures, 1).mmul(dx.reshape(1, numFeatures)));
    }
    // Population covariance: divide by N (not N - 1), as in the original.
    cov.divi(numRecords);

    return new INDArray[] {cov, mean};
}