Refine search
/**
 * Returns the number of vectors contained in each slice of an ndarray.
 *
 * @param arr  the array to compute vectors-per-slice for
 * @param rank the dimensions to get the number of vectors per slice for
 *             (NOTE(review): currently unused by the implementation — confirm intent)
 * @return the number of vectors per slice
 */
public static long vectorsPerSlice(INDArray arr, int... rank) {
    // Rank > 2: a slice contains size(-2) * size(-1) vector elements laid out
    // over the last two dimensions. Rank <= 2: each slice is a single vector.
    return arr.rank() > 2 ? arr.size(-2) * arr.size(-1) : arr.size(-1);
}
/**
 * Computes the loss gradient w.r.t. the pre-output activations.
 * Validates that labels and pre-output have the same column count, then
 * delegates the actual computation to {@code calculate} (score output is skipped).
 *
 * @param labels       expected outputs
 * @param preOutput    pre-activation network output
 * @param activationFn output-layer activation function
 * @param mask         optional mask array (may be null)
 * @return gradient array, same shape as labels
 * @throws IllegalArgumentException if label/output column counts differ
 */
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    final long labelCols = labels.size(1);
    final long outputCols = preOutput.size(1);
    if (labelCols != outputCols) {
        throw new IllegalArgumentException("Labels array numColumns (size(1) = " + labelCols
                + ") does not match output layer" + " number of outputs (nOut = " + outputCols + ") ");
    }
    // Seed the gradient with ones; calculate() fills it in-place (null = no score wanted).
    final INDArray grad = Nd4j.ones(labels.shape());
    calculate(labels, preOutput, activationFn, mask, null, grad);
    return grad;
}
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } /* y_hat is -1 or 1 hinge loss is max(0,1-y_hat*y) */ INDArray output = activationFn.getActivation(preOutput.dup(), true); INDArray scoreArr = output.muli(labels); //y*yhat scoreArr.rsubi(1.0); //1 - y*yhat if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; // 1 - y*yhat }
// NOTE(review): truncated fragment of a 4d-array merge routine — several closing
// braces are missing in this excerpt, so it is not compilable as shown. It tallies
// nExamples across inputs, rejects non-rank-4 arrays, requires per-array masks to be
// rank 2, and finally builds a merged per-output mask via DataSetUtil.
// NOTE(review): the condition "masks[i] != null && masks[i] != null" repeats the same
// operand — presumably one of them was meant to be a different check; confirm upstream.
long[] shape = arrays[0].shape(); INDArray[] temp = new INDArray[arrays.length]; boolean hasMasks = false; for (int i = 0; i < arrays.length; i++) { nExamples += arrays[i].size(0); long[] thisShape = arrays[i].shape(); if (thisShape.length != 4) { throw new IllegalStateException("Cannot merge 4d arrays with non 4d arrays"); if (masks != null && masks[i] != null && masks[i] != null) { hasMasks = true; if (masks[i].rank() != 2) { throw new UnsupportedOperationException("Cannot merged 4d arrays with masks that are not rank 2." + " Got mask array with rank: " + masks[i].rank()); INDArray outMask = null; if (hasMasks) { outMask = DataSetUtil.mergePerOutputMasks2d(out.shape(), arrays, masks);
// NOTE(review): truncated excerpt mixing a label/output column-count validation with
// what appears to be part of a diff (note the stray leading '+' tokens) from a
// LossCosineProximity mask-shape error message. Not compilable as shown; the
// surrounding method body and the statement the '+' fragments belong to are missing.
if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); INDArray postOutput = activationFn.getActivation(preOutput.dup(), true); + " Got mask array with shape " + Arrays.toString(mask.shape()) + "; per-output masking is not " + "supported for LossCosineProximity");
// NOTE(review): truncated fragment of a row-vector broadcast validation; the method's
// closing braces are missing in this excerpt. The compound condition mixes || and &&
// and relies on && binding tighter than || — verify the intended grouping (in
// particular whether "rowVector.length() <= 1" should really OR with the whole
// shape-mismatch clause) against the full source.
if (!rowVector.isRowVector() || this.rank() > 1 && rowVector.rank() > 1 && this.size(1) != rowVector.size(1) || rowVector.length() <= 1) { throw new IllegalStateException("Mismatched shapes (shape = " + Arrays.toString(shape()) + ", row vector shape =" + Arrays.toString(rowVector.shape()) + ")"); return doRowWise(rowVector.dup(), operation);
// NOTE(review): truncated excerpt of an ndarray pretty-printer. Branches are
// incomplete (unbalanced braces), the loop variable 'i' and 'sb' are undefined in
// this view, and the scalar branch body is cut off. Do not modify without the full
// method — the 'f'-ordering dup('c') special case suggests order-sensitive recursion.
private String format(INDArray arr, int offset, boolean summarize) { int rank = arr.rank(); if (arr.isScalar()) { if (arr.rank() == 3 && arr.slice(i).isRowVector()) sb.append("["); if (arr.ordering() == 'f' && arr.rank() > 2 && arr.size(arr.rank() - 1) == 1) { sb.append(format(arr.dup('c').slice(i), offset, summarize)); } else if(arr.rank() <= 1 || arr.length() == 1) { sb.append(format(Nd4j.scalar(arr.getDouble(0)),offset,summarize));
// NOTE(review): truncated excerpt of NDArrayIndex resolution logic (uses Lombok
// 'val'). It tracks all-encountered dimensions, special-cases row-vector shapes when
// building accumulated shape/offset lists, and branches on interval count vs rank.
// Several if-bodies and the enclosing method are cut off — not compilable as shown;
// the exact offset/stride bookkeeping cannot be verified from this view.
val shape = arr.shape(); if (idx instanceof NDArrayIndexAll) { encounteredAll = true; if (i < arr.rank() && arr.size(i) == 1) oneDimensionWithAllEncountered.add(i); if (Shape.isRowVectorShape(arr.shape())) accumShape.add(0, 1L); else accumStrides.addAll(pointStrides); while (accumOffsets.size() < accumShape.size()) { if (Shape.isRowVectorShape(arr.shape())) accumOffsets.add(0, 0L); else this.offset = 0; if (numIntervals > 0 && arr.rank() > 2) { if (encounteredAll && arr.size(0) != 1 || indexes[0] instanceof PointIndex)
/** * This method stacks vertically examples with the same shape, increasing result dimensionality. I.e. if you provide bunch of 3D tensors, output will be 4D tensor. Alignment is always applied to axis 0. * * @return */ public static INDArray pile(INDArray... arrays) { // if we have vectors as input, it's just vstack use case if (arrays[0].isRowVector() && arrays[0].rank() == 2) { return Nd4j.vstack(arrays); } long[] shape = arrays[0].shape(); long[] newShape = ArrayUtils.add(shape, 0, 1); boolean shouldReshape = true; if (arrays[0].size(0) == 1) shouldReshape = false; List<INDArray> reshaped = new ArrayList<>(); for(INDArray array: arrays) { if (!shouldReshape) reshaped.add(array); else reshaped.add(array.reshape(array.ordering(), newShape)); } return Nd4j.vstack(reshaped); }
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } /* y_hat is -1 or 1 hinge loss is max(0,1-y_hat*y) */ //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup())); INDArray output = activationFn.getActivation(preOutput.dup(), true); INDArray scoreArr = output.muli(labels); //y*yhat scoreArr.rsubi(1.0); //1 - y*yhat if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; // 1 - y*yhat }
// NOTE(review): truncated start of tailor3d2d. It validates 3d data with shape
// [minibatch, vectorSize, timeSeriesLength] against a 2d mask with shape
// [minibatch, timeSeriesLength], then dups both to 'f' order before reshaping to 2d.
// The body after the shape[0] == 1 branch is cut off in this excerpt — not
// compilable as shown; do not edit without the full method.
public static INDArray tailor3d2d(@NonNull INDArray data, INDArray mask) { if (data.size(0) != mask.size(0) || data.size(2) != mask.size(1)) { throw new IllegalArgumentException( "Invalid mask array/data combination: got data with shape [minibatch, vectorSize, timeSeriesLength] = " + Arrays.toString(data.shape()) + "; got mask with shape [minibatch,timeSeriesLength] = " + Arrays.toString(mask.shape()) + "; minibatch and timeSeriesLength dimensions must match"); data = data.dup('f'); val shape = data.shape(); INDArray as2d; if (shape[0] == 1) { mask = mask.dup('f');
/**
 * Return the number of vectors for an array: the product of all dimensions
 * except the last (vector) dimension.
 *
 * @param arr the array to calculate the number of vectors for
 * @return the number of vectors for the given array
 */
public static long numVectors(INDArray arr) {
    if (arr.rank() == 1)
        return 1;
    else if (arr.rank() == 2)
        return arr.size(0);
    else {
        // BUGFIX: accumulator was an int; "prod *= arr.size(i)" compound-assigns a
        // long into an int, silently truncating for large tensors. Use long — the
        // method already returns long.
        long prod = 1;
        for (int i = 0; i < arr.rank() - 1; i++) {
            prod *= arr.size(i);
        }
        return prod;
    }
}
/**
 * Computes both the scalar score and the gradient in a single pass by
 * delegating to {@code calculate}, which fills both output arrays in place.
 *
 * @param labels       expected outputs
 * @param preOutput    pre-activation network output
 * @param activationFn output-layer activation function
 * @param mask         optional mask array (may be null)
 * @param average      whether to average the summed score over examples
 * @return pair of (score, gradient)
 */
@Override
public Pair<Double, INDArray> computeGradientAndScore(INDArray labels, INDArray preOutput,
                IActivation activationFn, INDArray mask, boolean average) {
    // One score entry per example; gradient seeded with ones for calculate() to fill.
    final INDArray perExampleScores = Nd4j.create(labels.size(0), 1);
    final INDArray gradient = Nd4j.ones(labels.shape());
    calculate(labels, preOutput, activationFn, mask, perExampleScores, gradient);
    double totalScore = perExampleScores.sumNumber().doubleValue();
    if (average) {
        totalScore /= perExampleScores.size(0);
    }
    return new Pair<>(totalScore, gradient);
}
// NOTE(review): truncated CNN->RNN preprocessor (4d activations in, 3d out). This
// excerpt references an undefined variable 'product' and assigns input.shape()
// (long[] in current ND4J) to an int[] — the full source presumably defines/casts
// these; not compilable as shown. The reshape sequence (4d->2d->3d, then permute
// to [miniBatch, timeSeriesLength, channels*h*w] order) is order-sensitive — do not
// restyle without the complete method.
@Override public INDArray preProcess(INDArray input, int miniBatchSize) { if (input.rank() != 4) throw new IllegalArgumentException( "Invalid input: expect CNN activations with rank 4 (received input with shape " + Arrays.toString(input.shape()) + ")"); //Input: 4d activations (CNN) //Output: 3d activations (RNN) if (input.ordering() != 'c') input = input.dup('c'); int[] shape = input.shape(); //[timeSeriesLength*miniBatchSize, numChannels, inputHeight, inputWidth] //First: reshape 4d to 2d, as per CnnToFeedForwardPreProcessor INDArray twod = input.reshape('c', input.size(0), ArrayUtil.prod(input.shape()) / input.size(0)); //Second: reshape 2d to 3d, as per FeedForwardToRnnPreProcessor INDArray reshaped = twod.dup('f').reshape('f', miniBatchSize, shape[0] / miniBatchSize, product); return reshaped.permute(0, 2, 1); }
// NOTE(review): truncated excerpt of a JSON serializer for an INDArray (method
// signature starts before this view). It dups views to get contiguous data, writes
// shape then stride arrays, then rank/numElements/ordering fields. The
// writeStartArray() calls matching the writeEndArray() are missing from this
// excerpt — not compilable/verifiable as shown.
throws IOException { if (indArray.isView()) indArray = indArray.dup(indArray.ordering()); jsonGenerator.writeStartObject(); DataBuffer view = indArray.data(); for (int i = 0; i < indArray.rank(); i++) { jsonGenerator.writeNumber(indArray.size(i)); for (int i = 0; i < indArray.rank(); i++) jsonGenerator.writeNumber(indArray.stride(i)); jsonGenerator.writeEndArray(); jsonGenerator.writeNumberField("rankField", indArray.rank()); jsonGenerator.writeNumberField("numElements", view.length()); jsonGenerator.writeStringField("orderingField", String.valueOf(indArray.ordering()));
// Tutorial-style diagnostics for a 2d INDArray (the comments reference a 3x5
// matrix defined outside this excerpt). Part of an unseen enclosing method —
// statements reproduced verbatim; only this explanatory comment added.
System.out.println("Num. Rows: " + myArray.rows()); System.out.println("Num. Columns: " + myArray.columns()); System.out.println("Num. Dimensions: " + myArray.rank()); //2 dimensions -> rank 2 System.out.println("Shape: " + Arrays.toString(myArray.shape())); //[3,5] -> 3 rows, 5 columns System.out.println("Length: " + myArray.length()); // 3 rows * 5 columns = 15 total elements System.out.println("size(0) == nRows: " + myArray.size(0)); //Also equivalent to: .shape()[0] System.out.println("size(1) == nCols: " + myArray.size(1)); //Also equivalent to: .shape()[1] System.out.println("Is a vector: " + myArray.isVector()); System.out.println("Is a scalar: " + myArray.isScalar());
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) { if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); } /* mean of (yhat - y * log(yhat)) */ //INDArray postOutput = Nd4j.utioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup())); INDArray postOutput = activationFn.getActivation(preOutput.dup(), true); INDArray scoreArr = Transforms.log(postOutput); scoreArr.muli(labels); scoreArr = postOutput.sub(scoreArr); if (mask != null) { LossUtil.applyMask(scoreArr, mask); } return scoreArr; }
// NOTE(review): truncated excerpt of a loss "calculate"-style method that fills
// scoreOutput and/or gradientOutput in place. Most of the per-example loop body and
// the guard around the first throw are missing — not compilable as shown. The
// gradient line applies activationFn.backprop to whatever gradientOutput holds at
// that point; the preceding computation is cut off, so its correctness cannot be
// assessed from this view.
throw new IllegalArgumentException("You have to provide at least one of scoreOutput or gradientOutput!"); if (labels.size(1) != preOutput.size(1)) { throw new IllegalArgumentException( "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1) + ") "); final INDArray postOutput = activationFn.getActivation(preOutput.dup(), true); long examples = positive.size(0); for (int i = 0; i < examples; i++) { final INDArray locCfn = postOutput.getRow(i); final long[] shape = locCfn.shape(); gradientOutput.assign(activationFn.backprop(preOutput.dup(), gradientOutput).getFirst());
/**
 * Returns the number of matrices in each slice of an ndarray: 1 for a rank-3
 * array, the product of the interior dimensions (excluding the first and the
 * last two) for rank &gt; 3, and size(-2) otherwise.
 *
 * @param arr the array to get the number of matrices per slice for
 * @return the number of matrices per slice
 */
public static long matricesPerSlice(INDArray arr) {
    if (arr.rank() == 3) {
        return 1;
    } else if (arr.rank() > 3) {
        // BUGFIX: accumulator was an int; "ret *= arr.size(i)" compound-assigns a
        // long into an int, silently truncating for large tensors. Use long — the
        // method already returns long.
        long ret = 1;
        for (int i = 1; i < arr.rank() - 2; i++) {
            ret *= arr.size(i);
        }
        return ret;
    }
    return arr.size(-2);
}
/**
 * Returns a column vector where each entry is the nth bilinear
 * product of the nth slices of the two tensors: entry i is in^T * slice(i) * in.
 *
 * @param curr rank-3 tensor with square slices
 * @param in   column vector whose length matches the slice dimension
 * @return column vector of bilinear products, one per slice
 */
@Override
public INDArray bilinearProducts(INDArray curr, INDArray in) {
    assert curr.shape().length == 3;
    if (in.columns() != 1) {
        throw new AssertionError("Expected a column vector");
    }
    int tensorRank = curr.shape().length;
    long lastDim = curr.size(tensorRank - 1);
    if (in.rows() != lastDim) {
        throw new AssertionError("Number of rows in the input does not match number of columns in tensor");
    }
    if (curr.size(tensorRank - 2) != lastDim) {
        throw new AssertionError("Can only perform this operation on a SimpleTensor with square slices");
    }
    INDArray result = Nd4j.create(curr.slices(), 1);
    INDArray inTranspose = in.transpose();
    for (int sliceIdx = 0; sliceIdx < curr.slices(); sliceIdx++) {
        // in^T * slice yields a row vector; dotting with in completes in^T * S * in.
        INDArray rowProduct = inTranspose.mmul(curr.slice(sliceIdx));
        result.putScalar(sliceIdx, Nd4j.getBlasWrapper().dot(rowProduct, in));
    }
    return result;
}