/**
 * Applies the given predicate element-wise across a pair of arrays.
 * Element i of each input is interpreted as a boolean (true iff the value equals 1.0),
 * and the predicate's verdict is recorded as 1.0 (true) or 0.0 (false).
 *
 * @param arr1      first input array
 * @param arr2      second input array, read at the same linear indices as arr1
 * @param predicate test applied to the pair {arr1[i] == 1.0, arr2[i] == 1.0}
 * @return a new array shaped like arr1 holding the predicate results as 0/1 values
 */
static INDArray compare(INDArray arr1, INDArray arr2, Predicate<Boolean []> predicate) {
    final INDArray out = Nd4j.create(arr1.shape());
    for (int idx = 0; idx < arr1.length(); idx++) {
        final Boolean[] pair = {arr1.getDouble(idx) == 1.0, arr2.getDouble(idx) == 1.0};
        out.putScalar(idx, predicate.test(pair) ? 1.0 : 0.0);
    }
    return out;
}
/**
 * Computes the covariance matrix of a data set whose rows are records with N features,
 * along with the per-feature mean vector. Covariance is accumulated as the average of
 * the outer products of mean-centered rows (average dx_i * dx_j), which is equivalent
 * to average x_i * x_j - average x_i * average x_j.
 *
 * @param in a matrix with one fixed-length feature vector (N features) per row
 * @return INDArray[2]: element 0 is the N x N covariance matrix, element 1 the mean vector
 */
public static INDArray[] covarianceMatrix(INDArray in) {
    long numRecords = in.rows();
    long numFeatures = in.columns();

    // Per-feature means of the data set.
    INDArray mean = Nd4j.create(numFeatures);
    for (int j = 0; j < numFeatures; j++) {
        mean.getColumn(j).assign(in.getColumn(j).sumNumber().doubleValue() / numRecords);
    }

    // Accumulate outer products of the mean-centered rows, then average.
    INDArray cov = Nd4j.create(numFeatures, numFeatures);
    for (int r = 0; r < numRecords; r++) {
        INDArray centered = in.getRow(r).sub(mean);
        cov.addi(centered.reshape(numFeatures, 1).mmul(centered.reshape(1, numFeatures)));
    }
    cov.divi(numRecords);

    return new INDArray[] {cov, mean};
}
/**
 * Computes the loss score and gradient in a single pass.
 * Per-example scores and the gradient are filled in by {@code calculate}; the score
 * is the sum over examples, optionally averaged by the number of examples.
 *
 * @param labels       ground-truth labels
 * @param preOutput    pre-activation network output
 * @param activationFn activation applied to preOutput
 * @param mask         optional mask (may be null, forwarded to calculate)
 * @param average      if true, divide the summed score by the number of examples
 * @return pair of (score, gradient with the same shape as labels)
 */
@Override
public Pair<Double, INDArray> computeGradientAndScore(INDArray labels, INDArray preOutput,
        IActivation activationFn, INDArray mask, boolean average) {
    final INDArray scorePerExample = Nd4j.create(labels.size(0), 1);
    final INDArray gradient = Nd4j.ones(labels.shape());
    calculate(labels, preOutput, activationFn, mask, scorePerExample, gradient);

    double totalScore = scorePerExample.sumNumber().doubleValue();
    if (average) {
        totalScore /= scorePerExample.size(0);
    }
    return new Pair<>(totalScore, gradient);
}
/**
 * Normalizes each row of the matrix in place by dividing it by that row's maximum value.
 *
 * @param toScale the matrix whose rows are scaled in place
 */
public static void scaleByMax(INDArray toScale) {
    final INDArray rowMaxes = toScale.max(1);
    for (int row = 0; row < toScale.rows(); row++) {
        final double max = rowMaxes.getDouble(row);
        toScale.putRow(row, toScale.getRow(row).divi(max));
    }
}
/**
 * Performs an operation along the main diagonal of a matrix, in place.
 * Non-matrix inputs are silently ignored, matching the original behavior.
 *
 * For non-square matrices only the leading min(rows, columns) diagonal entries exist,
 * so iteration stops there. (The original looped over all rows, which would index out
 * of bounds on a matrix with more rows than columns.)
 *
 * @param x    the ndarray whose diagonal elements are transformed in place
 * @param func the operation applied to each diagonal element
 */
public static void doAlongDiagonal(INDArray x, Function<Number, Number> func) {
    if (x.isMatrix()) {
        int diagLength = (int) Math.min(x.rows(), x.columns());
        for (int i = 0; i < diagLength; i++)
            x.put(i, i, func.apply(x.getDouble(i, i)));
    }
}
/**
 * Prepares the boundaries for processing.
 *
 * @param bounds the bounds; element 0 is the lower bound, element 1 the upper bound
 * @param x      the input to the approximation, used only for its shape
 * @return the lower and upper bounds (in that order) as constant-filled ndarrays
 *         of the same shape as x
 */
public static INDArray[] prepareBounds(INDArray bounds, INDArray x) {
    final double lower = bounds.getDouble(0);
    final double upper = bounds.getDouble(1);
    return new INDArray[] {
            Nd4j.valueArrayOf(x.shape(), lower),
            Nd4j.valueArrayOf(x.shape(), upper)
    };
}
@Override public INDArray put(INDArray indices, INDArray element) { if(indices.rank() > 2) { throw new ND4JIllegalArgumentException("Indices must be a vector or matrix."); if(indices.rows() == rank()) { NdIndexIterator ndIndexIterator = new NdIndexIterator(element.shape()); for(int i = 0; i < indices.columns(); i++) { int[] specifiedIndex = indices.getColumn(i).dup().data().asInt(); putScalar(specifiedIndex,element.getDouble(ndIndexIterator.next())); List<INDArray> arrList = new ArrayList<>(); if(indices.isMatrix() || indices.isColumnVector()) { for(int i = 0; i < indices.rows(); i++) { INDArray row = indices.getRow(i); for(int j = 0; j < row.length(); j++) { INDArray slice = slice(row.getInt(j)); Nd4j.getExecutioner().exec(new Assign(new INDArray[]{slice,element},new INDArray[]{slice})); arrList.add(slice(row.getInt(j))); else if(indices.isRowVector()) { for(int i = 0; i < indices.length(); i++) { arrList.add(slice(indices.getInt(i)));
public INDArray adjustMasks(INDArray label, INDArray labelMask, int minorityLabel, double targetDist) { labelMask = Nd4j.ones(label.size(0), label.size(2)); INDArray bernoullis = Nd4j.zeros(labelMask.shape()); long currentTimeSliceEnd = label.size(2); INDArray currentWindowBernoulli = bernoullis.get(NDArrayIndex.all(), NDArrayIndex.interval(currentTimeSliceStart, currentTimeSliceEnd)); INDArray currentMask = labelMask.get(NDArrayIndex.all(), NDArrayIndex.interval(currentTimeSliceStart, currentTimeSliceEnd)); INDArray currentLabel; if (label.size(1) == 2) { currentLabel = label.get(NDArrayIndex.all(), NDArrayIndex.point(minorityLabel), NDArrayIndex.interval(currentTimeSliceStart, currentTimeSliceEnd)); } else { currentLabel = label.get(NDArrayIndex.all(), NDArrayIndex.point(0), NDArrayIndex.interval(currentTimeSliceStart, currentTimeSliceEnd)); if (minorityLabel == 0) { currentLabel = Transforms.not(currentLabel); currentWindowBernoulli.assign(calculateBernoulli(currentLabel, currentMask, targetDist)); return Nd4j.getExecutioner().exec( new BernoulliDistribution(Nd4j.createUninitialized(bernoullis.shape()), bernoullis), Nd4j.getRandom());
/**
 * Appends a value to the INDArray-backed list, growing the backing storage
 * geometrically (doubling) when full.
 *
 * @param aDouble the value to append
 * @return always true, per the Collection contract
 */
@Override
public boolean add(Double aDouble) {
    if (container == null) {
        // Lazily allocate the initial backing storage.
        container = Nd4j.create(10);
    } else if (size == container.length()) {
        // Full: double the capacity and copy the existing contents over.
        INDArray grown = Nd4j.create(container.length() * 2);
        grown.put(new INDArrayIndex[] {NDArrayIndex.interval(0, container.length())}, container);
        container = grown;
    }
    container.putScalar(size++, aDouble);
    return true;
}
@Override public INDArray sample(int[] shape) { int numRows = 1; for (int i = 0; i < shape.length - 1; i++) numRows *= shape[i]; int numCols = shape[shape.length - 1]; val flatShape = new int[]{numRows, numCols}; val flatRng = Nd4j.getExecutioner().exec(new GaussianDistribution(Nd4j.createUninitialized(flatShape, Nd4j.order()), 0.0, 1.0), random); long m = flatRng.rows(); long n = flatRng.columns(); val s = Nd4j.create(m < n ? m : n); val u = m < n ? Nd4j.create(m, n) : Nd4j.create(m, m); val v = Nd4j.create(n, n, 'f'); Nd4j.getBlasWrapper().lapack().gesvd(flatRng, s, u, v); // FIXME: int cast if (gains == null) { if (u.rows() == numRows && u.columns() == numCols) { return v.get(NDArrayIndex.interval(0, numRows), NDArrayIndex.interval(0, numCols)).mul(gain).reshape(ArrayUtil.toLongArray(shape)); } else { return u.get(NDArrayIndex.interval(0, numRows), NDArrayIndex.interval(0, numCols)).mul(gain).reshape(ArrayUtil.toLongArray(shape)); } } else { throw new UnsupportedOperationException(); } }
@Override public void geqrf(INDArray A, INDArray R) { // FIXME: int cast if (A.rows() > Integer.MAX_VALUE || A.columns() > Integer.MAX_VALUE) throw new ND4JArraySizeException(); int m = (int) A.rows(); int n = (int) A.columns(); INDArray INFO = Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createInt(1), Nd4j.getShapeInfoProvider().createShapeInformation(new int[] {1, 1}).getFirst()); if (R.rows() != A.columns() || R.columns() != A.columns()) { throw new Error("geqrf: R must be N x N (n = columns in A)"); } if (A.data().dataType() == DataBuffer.Type.DOUBLE) { dgeqrf(m, n, A, R, INFO); } else if (A.data().dataType() == DataBuffer.Type.FLOAT) { sgeqrf(m, n, A, R, INFO); } else { throw new UnsupportedOperationException(); } if (INFO.getInt(0) < 0) { throw new Error("Parameter #" + INFO.getInt(0) + " to getrf() was not valid"); } }
/**
 * Broadcast greater than or equal to op. See: {@link BroadcastGreaterThanOrEqual}
 *
 * When no dimensions are supplied (null), falls back to the element-wise op, which
 * requires x, y, and z to all share the same shape.
 */
public static INDArray gte(INDArray x, INDArray y, INDArray z, int... dimensions) {
    if (dimensions != null) {
        return Nd4j.getExecutioner().execAndReturn(new BroadcastGreaterThanOrEqual(x, y, z, dimensions));
    }
    // Element-wise path: all shapes must match exactly.
    Preconditions.checkArgument(Arrays.equals(x.shape(), y.shape()), getFormattedShapeErrorMessageXy(x, y));
    Preconditions.checkArgument(Arrays.equals(x.shape(), z.shape()), getFormattedShapeErrorMessageXResult(x, z));
    return Nd4j.getExecutioner().execAndReturn(new OldGreaterThanOrEqual(x, y, z, x.length()));
}
/**
 * Return a reduced basis set that covers a certain fraction of the variance of the data.
 *
 * @param variance the desired fractional variance (0 to 1); the covered fraction will
 *                 always exceed this value
 * @return the basis vectors as columns, size <i>N</i> rows by <i>ndims</i> columns,
 *         where <i>ndims</i> is less than or equal to <i>N</i>
 */
public INDArray reducedBasis(double variance) {
    // Per-dimension contribution terms derived from the eigenvalues.
    INDArray contributions = Transforms.pow(eigenvalues, -0.5, true);
    double totalContribution = contributions.sumNumber().doubleValue();

    // Count how many leading dimensions are needed to exceed the requested fraction.
    double accumulated = 0.0;
    int ndims = 0;
    for (int i = 0; i < contributions.columns(); i++) {
        ndims++;
        accumulated += contributions.getDouble(i);
        if (accumulated / totalContribution > variance)
            break;
    }

    // Assemble the first ndims eigenvectors as columns of the reduced basis.
    INDArray basis = Nd4j.create(eigenvectors.rows(), ndims);
    for (int i = 0; i < ndims; i++)
        basis.putColumn(i, eigenvectors.getColumn(i));
    return basis;
}
/** * ?tbsv solves a system of linear equations whose coefficients are in a triangular band matrix. * * @param order * @param Uplo * @param TransA * @param Diag * @param A * @param X */ @Override public void tbsv(char order, char Uplo, char TransA, char Diag, INDArray A, INDArray X) { if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL) OpProfiler.getInstance().processBlasCall(false, A, X); // FIXME: int cast if (X.data().dataType() == DataBuffer.Type.DOUBLE) { DefaultOpExecutioner.validateDataType(DataBuffer.Type.DOUBLE, A, X); dtbsv(order, Uplo, TransA, Diag, (int) X.length(), (int) A.columns(), A, (int) A.size(0), X, X.majorStride()); } else { DefaultOpExecutioner.validateDataType(DataBuffer.Type.FLOAT, A, X); stbsv(order, Uplo, TransA, Diag, (int) X.length(), (int) A.columns(), A, (int) A.size(0), X, X.majorStride()); } }
/**
 * Panic check: when the executioner's profiling mode requests Inf detection
 * (INF_PANIC or ANY_PANIC), throws if z contains any infinite value; otherwise a no-op.
 *
 * @param z the array to inspect
 * @throws ND4JIllegalStateException if any Inf values are found while checking is enabled
 */
public static void checkForInf(INDArray z) {
    final OpExecutioner.ProfilingMode mode = Nd4j.getExecutioner().getProfilingMode();
    if (mode != OpExecutioner.ProfilingMode.INF_PANIC && mode != OpExecutioner.ProfilingMode.ANY_PANIC)
        return;

    int infCount = 0;
    if (z.isScalar()) {
        // Scalars are checked directly, respecting the buffer's precision.
        if (z.data().dataType() == DataBuffer.Type.DOUBLE) {
            if (Double.isInfinite(z.getDouble(0)))
                infCount = 1;
        } else if (Float.isInfinite(z.getFloat(0))) {
            infCount = 1;
        }
    } else {
        // Count infinite entries with a single reduction op.
        MatchCondition condition = new MatchCondition(z, Conditions.isInfinite());
        infCount = Nd4j.getExecutioner().exec(condition, Integer.MAX_VALUE).getInt(0);
    }

    if (infCount > 0)
        throw new ND4JIllegalStateException("P.A.N.I.C.! Op.Z() contains " + infCount + " Inf value(s)");
}
/**
 * Computes the eigenvalues of a general matrix.
 *
 * @param A a square matrix (asserted, not enforced when assertions are disabled)
 * @return the eigenvalues as a complex array built from real (WR) and imaginary (WI) parts
 */
public static IComplexNDArray eigenvalues(INDArray A) {
    // Precondition: only square matrices have eigenvalues (no-op if -ea is not set).
    assert A.rows() == A.columns();
    // Real and imaginary eigenvalue parts, filled in by the LAPACK geev call below.
    // NOTE(review): these are allocated n x n, while geev's eigenvalue outputs are
    // conventionally length-n vectors — confirm the expected shape against the
    // geev wrapper's contract.
    INDArray WR = Nd4j.create(A.rows(), A.rows());
    INDArray WI = WR.dup();
    // 'N','N': neither left nor right eigenvectors are requested; A is duplicated
    // because geev overwrites its input. 'dummy' receives the unused eigenvector outputs.
    Nd4j.getBlasWrapper().geev('N', 'N', A.dup(), WR, WI, dummy, dummy);
    // Combine the real and imaginary parts into a complex result.
    return Nd4j.createComplex(WR, WI);
}
/**
 * Estimate the variance of a single record with reduced # of dimensions.
 *
 * @param data  a single record with the same <i>N</i> features as the constructing data set
 * @param ndims the number of dimensions to include in the calculation
 * @return the fraction (0 to 1) of the total variance covered by the <i>ndims</i> basis set
 */
public double estimateVariance(INDArray data, int ndims) {
    // Project the mean-centered record onto the eigenbasis.
    final INDArray centered = data.sub(mean);
    final INDArray coeffs = eigenvectors.transpose().mmul(centered.reshape(centered.columns(), 1));

    // Squared projection coefficients give per-dimension variance contributions.
    final INDArray squared = Transforms.pow(coeffs, 2);
    final double covered = squared.get(NDArrayIndex.interval(0, ndims)).sumNumber().doubleValue();
    final double total = squared.sumNumber().doubleValue();
    return covered / total;
}
/**
 * AdaGrad-style per-element learning-rate scaling of a gradient slice, in place.
 * Maintains a running accumulator of squared gradients (historicalGradient) and
 * multiplies the incoming gradient by learningRate / sqrt(accumulator).
 *
 * @param gradient the raw gradient; scaled in place and also returned
 * @param slice    which slice of the historical accumulator applies to this gradient
 * @param shape    shape used to lazily initialize the accumulator on first call
 * @return the scaled gradient (same object as the gradient argument)
 * @throws IllegalArgumentException if the accumulator slice length does not match the gradient
 */
public INDArray getGradient(INDArray gradient, int slice, int[] shape) {
    boolean historicalInitialized = false;
    INDArray sqrtHistory;
    if (this.historicalGradient == null) {
        // First call: start the accumulator at epsilon to avoid division by zero.
        this.historicalGradient = Nd4j.zeros(shape).add(epsilon);
        historicalInitialized = true;
    } else if (!this.historicalGradient.isVector()
            && this.historicalGradient.slice(slice).length() != gradient.length())
        throw new IllegalArgumentException("Illegal gradient");
    if (historicalGradient.isVector())
        sqrtHistory = sqrt(historicalGradient);
    else
        // NOTE(review): on the very first call the un-rooted accumulator is used directly
        // (sqrt is skipped when historicalInitialized is true) — confirm this is intended.
        sqrtHistory = !historicalInitialized ? sqrt(historicalGradient.slice(slice)) : historicalGradient;
    INDArray learningRates;
    try {
        // Per-element learning rates: learningRate / sqrtHistory (reverse-division, in place).
        learningRates = sqrtHistory.rdivi(learningRate);
    } catch (ArithmeticException ae) {
        // NOTE(review): floating-point division does not normally raise
        // ArithmeticException, so this epsilon fallback looks unreachable — confirm.
        learningRates = sqrtHistory.rdivi(learningRate + epsilon);
    }
    if (gradient.length() != learningRates.length())
        gradient.muli(learningRates.slice(slice));
    else
        gradient.muli(learningRates);
    // Accumulate the squared (already-scaled) gradient for future calls.
    this.historicalGradient.slice(slice).addi(gradient.mul(gradient));
    numIterations++;
    //ensure no zeros
    return gradient;
}
/**
 * Prints a one-line summary of the array's layout to standard out: data type,
 * ordering, offset, shape, stride, logical length, and backing buffer length.
 *
 * @param array the array whose header information is printed
 */
public static void printNDArrayHeader(INDArray array) {
    final StringBuilder header = new StringBuilder();
    header.append(array.data().dataType())
          .append(" - order=").append(array.ordering())
          .append(", offset=").append(array.offset())
          .append(", shape=").append(Arrays.toString(array.shape()))
          .append(", stride=").append(Arrays.toString(array.stride()))
          .append(", length=").append(array.length())
          .append(", data().length()=").append(array.data().length());
    System.out.println(header);
}
// Updater step fragment (the enclosing method is not fully visible here). The structure
// matches an AdaMax-style update — TODO confirm against the enclosing class:
//   m <- beta1*m + (1 - beta1)*grad        (first-moment estimate)
m.muli(config.getBeta1()).addi(gradient.mul(1 - config.getBeta1()));
//   u <- max(beta2*u, |grad|)              (element-wise max via OldMax)
u.muli(config.getBeta2());
Transforms.abs(gradient, false); //In-place should be OK here, original gradient values aren't used again later
Nd4j.getExecutioner().exec(new OldMax(u, gradient, u, u.length()));
u.addi(1e-32); // prevent NaNs in params
//   gradient <- alphat * m / u             (final update written back into 'gradient')
gradient.assign(m).muli(alphat).divi(u);