private static INDArray createFromCSC(double[] data, int[] rowIndices, int[] columnPointers, int[] shape) {
    INDArray result = Nd4j.zeros(shape);
    int columns = shape[1];
    int dataIdx = 0;
    for (int i = 0; i < columns; i++) {
        // For each column, copy its non-zero entries; the last column runs to the end of rowIndices
        int end = (i == columnPointers.length - 1 ? rowIndices.length : columnPointers[i + 1]);
        for (int k = dataIdx; k < end; k++, dataIdx++) {
            int j = rowIndices[k];
            result.put(j, i, data[k]);
        }
    }
    return result;
}
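For illustration only, a hypothetical call to createFromCSC (the sample matrix, the array values, and access to the private helper are assumptions, not part of the snippet):

// 3x3 matrix [[1,0,2],[0,3,0],[4,0,5]] stored column by column (CSC order)
double[] data = {1.0, 4.0, 3.0, 2.0, 5.0};
int[] rowIndices = {0, 2, 1, 0, 2};
int[] columnPointers = {0, 2, 3};   // start offset of each column; no trailing pointer, the last column runs to data.length
INDArray dense = createFromCSC(data, rowIndices, columnPointers, new int[]{3, 3});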
@Override
public DataSet call(String s) throws Exception {
    //Here: take a String, and map the characters to a one-hot representation
    Map<Character, Integer> cti = ctiBroadcast.getValue();
    int length = s.length();
    INDArray features = Nd4j.zeros(1, N_CHARS, length - 1);
    INDArray labels = Nd4j.zeros(1, N_CHARS, length - 1);
    char[] chars = s.toCharArray();
    int[] f = new int[3];
    int[] l = new int[3];
    for (int i = 0; i < chars.length - 1; i++) {    //Fill all length-1 time steps: feature = current char, label = next char
        f[1] = cti.get(chars[i]);
        f[2] = i;
        l[1] = cti.get(chars[i + 1]);   //Predict the next character given past and current characters
        l[2] = i;
        features.putScalar(f, 1.0);
        labels.putScalar(l, 1.0);
    }
    return new DataSet(features, labels);
}
INDArray zeros = Nd4j.zeros(nRows, nColumns);
public DataSet convertDataSet(int num) {
    int batchNumCount = 0;
    List<DataSet> dataSets = new ArrayList<>();
    FileSystem fs = CommonUtils.openHdfsConnect();
    try {
        while (batchNumCount != num && fileIterator.hasNext()) {
            ++batchNumCount;
            String fullPath = fileIterator.next();
            // The parent directory name of each image is its label
            Writable labelText = new Text(FilenameUtils.getBaseName((new File(fullPath)).getParent()));
            INDArray label = Nd4j.zeros(1, labels.size()).putScalar(new int[]{0, labels.indexOf(labelText)}, 1);
            InputStream imageios = fs.open(new Path(fullPath));
            INDArray features = asMatrix(imageios);
            imageios.close();
            Nd4j.getAffinityManager().tagLocation(features, AffinityManager.Location.HOST);
            dataSets.add(new DataSet(features, label));
        }
    } catch (Exception e) {
        throw new RuntimeException(e.getCause());
    } finally {
        CommonUtils.closeHdfsConnect(fs);
    }
    if (dataSets.size() == 0) {
        return new DataSet();
    } else {
        return DataSet.merge(dataSets);
    }
}
INDArray myArray = Nd4j.zeros(nRows, nColumns);
INDArray zerosColumn = Nd4j.zeros(3, 1);
originalArray.put(new INDArrayIndex[]{NDArrayIndex.all(), NDArrayIndex.point(2)}, zerosColumn);   //All rows, column index 2
System.out.println("\n\n\nOriginal array, after put operation:\n" + originalArray);
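The snippet above assumes an existing originalArray with three rows; a minimal, hypothetical setup (the name and shape are assumptions) could be:

INDArray originalArray = Nd4j.linspace(1, 15, 15).reshape('c', 3, 5);   // any 3-row array works; column index 2 is then zeroed by the put above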
@Override
public INDArray toDense() {
    // Dummy way - going to use the conversion routines in level2 (?)
    INDArray result = Nd4j.zeros(shape());
    int[] pointersB = pointerB.asInt();
    int[] pointersE = pointerE.asInt();
    for (int row = 0; row < rows(); row++) {
        for (int idx = pointersB[row]; idx < pointersE[row]; idx++) {
            result.put(row, columnsPointers.getInt(idx), values.getNumber(idx));
        }
    }
    return result;
}
@Override
protected void setup() throws LibrecException {
    super.setup();
    inputDim = numUsers;
    hiddenDim = conf.getInt("rec.hidden.dimension");
    learningRate = conf.getDouble("rec.iterator.learnrate");
    lambdaReg = conf.getDouble("rec.weight.regularization");
    numIterations = conf.getInt("rec.iterator.maximum");
    hiddenActivation = conf.get("rec.hidden.activation");
    outputActivation = conf.get("rec.output.activation");

    // transform the sparse matrix to INDArray
    int[] matrixShape = {numItems, numUsers};
    trainSet = Nd4j.zeros(matrixShape);
    trainSetMask = Nd4j.zeros(matrixShape);
    for (MatrixEntry me : trainMatrix) {
        trainSet.put(me.column(), me.row(), me.get());
        trainSetMask.put(me.column(), me.row(), 1);
    }
}
private INDArray labelsMinusMu(INDArray labels, INDArray mu) {
    // Now that we have the mixtures, let's compute the negative
    // log likelihood of the labels against the mixture means.
    long nSamples = labels.size(0);
    long labelsPerSample = labels.size(1);

    // This worked, but was actually much
    // slower than the for loop below.
    // labels = samples, mixtures, labels
    // mu = samples, mixtures
    // INDArray labelMinusMu = labels
    //        .reshape('f', nSamples, labelsPerSample, 1)
    //        .repeat(2, mMixtures)
    //        .permute(0, 2, 1)
    //        .subi(mu);

    // The above code does the same thing as the loop below,
    // but it does it with index magic instead of a for loop.
    // It turned out to be way less efficient than the simple 'for' here.
    INDArray labelMinusMu = Nd4j.zeros(nSamples, mMixtures, labelsPerSample);
    for (int k = 0; k < mMixtures; k++) {
        labelMinusMu.put(new INDArrayIndex[]{NDArrayIndex.all(), NDArrayIndex.point(k), NDArrayIndex.all()}, labels);
    }
    labelMinusMu.subi(mu);

    return labelMinusMu;
}
/**
 * Converts the sparse ndarray into a dense one
 * @return a dense ndarray
 */
@Override
public INDArray toDense() {
    // TODO support view conversion
    INDArray result = Nd4j.zeros(shape());
    switch (data().dataType()) {
        case DOUBLE:
            for (int i = 0; i < length; i++) {
                int[] idx = getUnderlyingIndicesOf(i).asInt();
                double value = values.getDouble(i);
                result.putScalar(idx, value);
            }
            break;
        case FLOAT:
            for (int i = 0; i < length; i++) {
                int[] idx = getUnderlyingIndicesOf(i).asInt();
                float value = values.getFloat(i);
                result.putScalar(idx, value);
            }
            break;
        default:
            throw new UnsupportedOperationException();
    }
    return result;
}
@Override
protected void setup() throws LibrecException {
    super.setup();
    inputDim = numItems;
    hiddenDim = conf.getInt("rec.hidden.dimension");
    learningRate = conf.getDouble("rec.iterator.learnrate");
    lambdaReg = conf.getDouble("rec.weight.regularization");
    numIterations = conf.getInt("rec.iterator.maximum");
    hiddenActivation = conf.get("rec.hidden.activation");
    outputActivation = conf.get("rec.output.activation");

    // transform the sparse matrix to INDArray
    // the sparse training matrix has been binarized
    int[] matrixShape = {numUsers, numItems};
    trainSet = Nd4j.zeros(matrixShape);
    for (MatrixEntry me : trainMatrix) {
        trainSet.put(me.row(), me.column(), me.get());
    }
}
public static boolean checkMulManually(INDArray first, INDArray second, double maxRelativeDifference, double minAbsDifference) {
    //No apache commons element-wise multiply, but can do this manually
    INDArray result = first.mul(second);
    long[] shape = first.shape();
    INDArray expected = Nd4j.zeros(first.shape());
    for (int i = 0; i < shape[0]; i++) {
        for (int j = 0; j < shape[1]; j++) {
            double v = first.getDouble(i, j) * second.getDouble(i, j);
            expected.putScalar(new int[]{i, j}, v);
        }
    }
    if (!checkShape(expected, result))
        return false;
    boolean ok = checkEntries(expected, result, maxRelativeDifference, minAbsDifference);
    if (!ok) {
        INDArray onCopies = Shape.toOffsetZeroCopy(first).mul(Shape.toOffsetZeroCopy(second));
        printFailureDetails(first, second, expected, result, onCopies, "mul");
    }
    return ok;
}
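A minimal, hypothetical invocation of checkMulManually (the shapes and tolerances are arbitrary choices for illustration, not part of the snippet):

INDArray a = Nd4j.rand(3, 4);
INDArray b = Nd4j.rand(3, 4);
boolean ok = checkMulManually(a, b, 1e-5, 1e-8);   // true if first.mul(second) matches the element-by-element loop within tolerance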
public static boolean checkDivManually(INDArray first, INDArray second, double maxRelativeDifference, double minAbsDifference) {
    //No apache commons element-wise division, but can do this manually
    INDArray result = first.div(second);
    long[] shape = first.shape();
    INDArray expected = Nd4j.zeros(first.shape());
    for (int i = 0; i < shape[0]; i++) {
        for (int j = 0; j < shape[1]; j++) {
            double v = first.getDouble(i, j) / second.getDouble(i, j);
            expected.putScalar(new int[]{i, j}, v);
        }
    }
    if (!checkShape(expected, result))
        return false;
    boolean ok = checkEntries(expected, result, maxRelativeDifference, minAbsDifference);
    if (!ok) {
        //Use element-wise division on the offset-zero copies, to match the operation being checked
        INDArray onCopies = Shape.toOffsetZeroCopy(first).div(Shape.toOffsetZeroCopy(second));
        printFailureDetails(first, second, expected, result, onCopies, "div");
    }
    return ok;
}
public INDArray getGradient(INDArray gradient, int slice, int[] shape) {
    boolean historicalInitialized = false;
    INDArray sqrtHistory;

    if (this.historicalGradient == null) {
        this.historicalGradient = Nd4j.zeros(shape).add(epsilon);
        historicalInitialized = true;
    } else if (!this.historicalGradient.isVector()
            && this.historicalGradient.slice(slice).length() != gradient.length())
        throw new IllegalArgumentException("Illegal gradient");

    if (historicalGradient.isVector())
        sqrtHistory = sqrt(historicalGradient);
    else
        sqrtHistory = !historicalInitialized ? sqrt(historicalGradient.slice(slice)) : historicalGradient;

    INDArray learningRates;
    try {
        learningRates = sqrtHistory.rdivi(learningRate);
    } catch (ArithmeticException ae) {
        learningRates = sqrtHistory.rdivi(learningRate + epsilon);
    }

    if (gradient.length() != learningRates.length())
        gradient.muli(learningRates.slice(slice));
    else
        gradient.muli(learningRates);

    this.historicalGradient.slice(slice).addi(gradient.mul(gradient));
    numIterations++;

    //ensure no zeros
    return gradient;
}
INDArray bernoullis = Nd4j.zeros(labelMask.shape());
long currentTimeSliceEnd = label.size(2);
std = (batchCount == 1) ? Nd4j.zeros(mean.shape()) : Transforms.pow(next.getFeatureMatrix().std(0), 2);
std.muli(batchCount);
} else {
INDArray gradient = Nd4j.zeros(nSamples, preOutput.columns());