@Override public INDArray getPFactor(int M, INDArray ipiv) { // The simplest permutation is the identity matrix INDArray P = Nd4j.eye(M); // result is a square matrix with given size for (int i = 0; i < ipiv.length(); i++) { int pivot = ipiv.getInt(i) - 1; // Did we swap row #i with anything? if (pivot > i) { // don't reswap when we get lower down in the vector INDArray v1 = P.getColumn(i).dup(); // because of row vs col major order we'll ... INDArray v2 = P.getColumn(pivot); // ... make a transposed matrix immediately P.putColumn(i, v2); P.putColumn(pivot, v1); // note dup() above is required - getColumn() is a 'view' } } return P; // the permutation matrix - contains a single 1 in any row and column }
// Copy the column of `in` selected by the i-th permutation index into column i of `out`.
// NOTE(review): fragment — `out`, `in`, `i` and `list` are defined in the enclosing (out-of-view) method.
out.putColumn(i, in.getColumn(list.get(i)));
// NOTE(review): fragment — `firstRow` is defined elsewhere; getColumn(4) assumes the array has
// at least 5 columns (TODO confirm against the surrounding example/demo code).
INDArray lastColumn = originalArray.getColumn(4); System.out.println(); System.out.println("First row:\n" + firstRow);
/**
 * Return a reduced basis set that covers a certain fraction of the variance of the data.
 *
 * @param variance The desired fractional variance (0 to 1); the covered fraction always
 *                 ends up strictly greater than this value.
 * @return The basis vectors as columns, size <i>N</i> rows by <i>ndims</i> columns,
 *         where <i>ndims</i> is less than or equal to <i>N</i>.
 */
public INDArray reducedBasis(double variance) {
    // Per-component weights: eigenvalues^(-1/2), element-wise (the `true` flag requests a copy).
    INDArray weights = Transforms.pow(eigenvalues, -0.5, true);
    double weightTotal = weights.sumNumber().doubleValue();

    // Count leading components until the accumulated fraction exceeds the requested variance.
    double covered = 0.0;
    int ndims = 0;
    for (int i = 0; i < weights.columns(); i++) {
        ndims++;
        covered += weights.getDouble(i);
        if (covered / weightTotal > variance)
            break;
    }

    // Assemble the leading ndims eigenvectors as the columns of the result.
    INDArray basis = Nd4j.create(eigenvectors.rows(), ndims);
    for (int i = 0; i < ndims; i++)
        basis.putColumn(i, eigenvectors.getColumn(i));
    return basis;
}
/**
 * Returns the covariance matrix of a data set of many records, each with N features.
 * It also returns the column averages, which matter here because all modes are centered
 * around the mean: each element is the average of dx_i * dx_j over the records
 * (equivalently, avg(x_i * x_j) - avg(x_i) * avg(x_j)).
 *
 * @param in A matrix of vectors of fixed length N (N features) on each row
 * @return INDArray[2]: element 0 is the N x N covariance matrix, element 1 the average values.
 */
public static INDArray[] covarianceMatrix(INDArray in) {
    long recordCount = in.rows();
    long featureCount = in.columns();

    // Column-wise mean of the input.
    INDArray mean = Nd4j.create(featureCount);
    for (int j = 0; j < featureCount; j++)
        mean.getColumn(j).assign(in.getColumn(j).sumNumber().doubleValue() / recordCount);

    // Accumulate the outer product of each mean-centered row.
    INDArray cov = Nd4j.create(featureCount, featureCount);
    for (int r = 0; r < recordCount; r++) {
        INDArray centered = in.getRow(r).sub(mean);
        cov.addi(centered.reshape(featureCount, 1).mmul(centered.reshape(1, featureCount)));
    }
    cov.divi(recordCount);

    return new INDArray[] {cov, mean};
}
/** * This method performs a dimensionality reduction, including principal components * that cover a fraction of the total variance of the system. It does all calculations * about the mean. * @param in A matrix of datapoints as rows, where column are features with fixed number N * @param variance The desired fraction of the total variance required * @return The reduced basis set */ public static INDArray pca2(INDArray in, double variance) { // let's calculate the covariance and the mean INDArray[] covmean = covarianceMatrix(in); // use the covariance matrix (inverse) to find "force constants" and then break into orthonormal // unit vector components INDArray[] pce = principalComponents(covmean[0]); // calculate the variance of each component INDArray vars = Transforms.pow(pce[1], -0.5, true); double res = vars.sumNumber().doubleValue(); double total = 0.0; int ndims = 0; for (int i = 0; i < vars.columns(); i++) { ndims++; total += vars.getDouble(i); if (total / res > variance) break; } INDArray result = Nd4j.create(in.columns(), ndims); for (int i = 0; i < ndims; i++) result.putColumn(i, pce[0].getColumn(i)); return result; }
@Override public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { double[] d = computeScoreNumDenom(labels, preOutput, activationFn, mask, false); double numerator = d[0]; double denominator = d[1]; if (numerator == 0.0 && denominator == 0.0) { //Zero score -> zero gradient return Nd4j.create(preOutput.shape()); } double secondTerm = numerator / (denominator * denominator); INDArray dLdOut; if (labels.size(1) == 1) { //Single binary output case dLdOut = labels.mul(1 + beta * beta).divi(denominator).subi(secondTerm); } else { //Softmax case: the getColumn(1) here is to account for the fact that we're using prob(class1) // only in the score function; column(1) is equivalent to output for the single output case dLdOut = Nd4j.create(labels.shape()); dLdOut.getColumn(1).assign(labels.getColumn(1).mul(1 + beta * beta).divi(denominator).subi(secondTerm)); } //Negate relative to description in paper, as we want to *minimize* 1.0-fMeasure, which is equivalent to // maximizing fMeasure dLdOut.negi(); INDArray dLdPreOut = activationFn.backprop(preOutput, dLdOut).getFirst(); if (mask != null) { dLdPreOut.muliColumnVector(mask); } return dLdPreOut; }
// Column i of `indices` holds one multi-dimensional index; dup() detaches the view so
// asInt() reads a compact copy. The looked-up value is scattered into position i of `ret`.
// NOTE(review): fragment — `indices`, `ret`, `i` and getDouble() belong to the enclosing (out-of-view) method.
int[] specifiedIndex = indices.getColumn(i).dup().data().asInt(); ret.putScalar(i,getDouble(specifiedIndex));
// Build an n x k factor matrix in column-major ('f') order from the first k columns of V.
// NOTE(review): fragment — the for-loop body continues past this view; braces close elsewhere.
INDArray factor = Nd4j.create(n, k, 'f'); for (int i = 0; i < k; i++) { factor.putColumn(i, V.getColumn(i));
// Same scatter pattern as elsewhere in this file: column i of `indices` is one
// multi-dimensional index; dup() detaches the view before reading its backing data.
// NOTE(review): fragment — `val` is Lombok-style inference; enclosing method not in view.
int[] specifiedIndex = indices.getColumn(i).dup().data().asInt(); val v = getDouble(specifiedIndex); ret.putScalar(i, v);
// Build an n x nDims factor matrix in column-major ('f') order from the first nDims columns of V.
// NOTE(review): fragment — the for-loop body continues past this view; braces close elsewhere.
INDArray factor = Nd4j.create(n, nDims, 'f'); for (int i = 0; i < nDims; i++) { factor.putColumn(i, V.getColumn(i));
// Walk `element` in its natural index order, writing each value into this array at the
// multi-dimensional position given by the matching column of `indices`.
// NOTE(review): fragment — loop brace closes out of view; putScalar() belongs to the enclosing class.
NdIndexIterator ndIndexIterator = new NdIndexIterator(element.shape()); for(int i = 0; i < indices.columns(); i++) { int[] specifiedIndex = indices.getColumn(i).dup().data().asInt(); putScalar(specifiedIndex,element.getDouble(ndIndexIterator.next()));
/**
 * Get the residual plot, only for examples of the specified class. The residual plot is a
 * histogram of<br>
 * |label_i - prob(class_i | input)| over all examples; for this particular method, only
 * predictions where i == labelClassIdx are included.<br>
 * In general, small residuals indicate a superior classifier to large residuals.
 *
 * @param labelClassIdx Index of the class to get the residual plot for
 * @return Residual plot (histogram) - all predictions/classes
 */
public Histogram getResidualPlot(int labelClassIdx) {
    // dup() detaches the column view so asInt() reads a compact copy of the bin counts.
    int[] binCounts = residualPlotByLabelClass.getColumn(labelClassIdx).dup().data().asInt();
    String title = "Residual Plot - Predictions for Label Class " + labelClassIdx;
    return new Histogram(title, 0.0, 1.0, binCounts);
}
// Synthetic-data iterator for benchmarking: random features, fixed labels, fixed iteration count.
// NOTE(review): getColumn(1) assumes numLabels >= 2 — confirm callers never pass numLabels == 1.
public BenchmarkDataSetIterator(int[] featuresShape, int numLabels, int totalIterations) { this.baseFeatures = Nd4j.rand(featuresShape); this.baseLabels = Nd4j.create(featuresShape[0], numLabels); this.baseLabels.getColumn(1).assign(1.0); Nd4j.getExecutioner().commit(); this.limit = totalIterations; }
@Override
public INDArray ndOp(INDArray features, INDArray adjacencyMatrix) {
    int nFeatures = features.columns();
    // Transpose is loop-invariant; hoist it (behavior unchanged).
    INDArray adjT = adjacencyMatrix.transpose();
    INDArray[] columnMaxes = new INDArray[nFeatures];
    // For each feature column: weight by adjacency, then take the max over axis 0.
    for (int c = 0; c < nFeatures; c++) {
        INDArray weighted = adjT.mulColumnVector(features.getColumn(c));
        columnMaxes[c] = weighted.max(0).transpose();
    }
    return Nd4j.hstack(columnMaxes);
}
@Override
public INDArray ndOp(INDArray features, INDArray adjacencyMatrix) {
    final double sigma = 16;
    int nNodes = adjacencyMatrix.rows();
    INDArray[] squaredDiffSums = new INDArray[nNodes];
    for (int n = 0; n < nNodes; n++) {
        INDArray adjColumn = adjacencyMatrix.getColumn(n);
        // Tile node n's feature row over all rows, masked in-place by adjacency.
        INDArray tiled = features.getRow(n).repeat(0, features.rows()).muliColumnVector(adjColumn);
        INDArray diff = tiled.sub(features.mulColumnVector(adjColumn));
        // Sum of squared differences per feature column.
        squaredDiffSums[n] = Transforms.pow(diff, 2).sum(0);
    }
    // Gaussian kernel: exp(-d^2 / sigma^2).
    INDArray scaled = Nd4j.vstack(squaredDiffSums).muli(-(1d / Math.pow(sigma, 2)));
    return Transforms.exp(scaled);
}
@Override
public INDArray ndOp(INDArray features, INDArray adjacencyMatrix) {
    int nNodes = adjacencyMatrix.rows();
    INDArray[] rowNorms = new INDArray[nNodes];
    for (int n = 0; n < nNodes; n++) {
        INDArray current = features.getRow(n);
        // Adjacency weights for node n, tiled across the feature dimension.
        INDArray weights = adjacencyMatrix.transpose().getColumn(n).repeat(1, features.columns());
        INDArray tiled = current.repeat(0, features.rows()).mul(weights);
        INDArray diff = tiled.sub(features.mul(weights));
        // L1 norm of the masked differences, computed per feature column.
        rowNorms[n] = diff.norm1(0);
    }
    return Nd4j.vstack(rowNorms);
}