/**
 * Predicts the rating of user {@code userIdx} for item {@code itemIdx} by feeding the
 * item's training rating row through the AutoRec network and reading the user's entry
 * from the reconstructed vector.
 *
 * @param userIdx index of the user within the reconstructed rating vector
 * @param itemIdx index of the item whose training row is fed to the network
 * @return the predicted rating value
 * @throws LibrecException if prediction fails
 */
@Override
protected double predict(int userIdx, int itemIdx) throws LibrecException {
    // Item-based AutoRec: the network reconstructs the full per-user rating vector
    // for this item; the prediction is the reconstructed entry for this user.
    INDArray predictedRatingVector = autoRecModel.output(trainSet.getRow(itemIdx));
    return predictedRatingVector.getDouble(userIdx);
}
}
/** * Read line via input streams * * @param filePath the input stream ndarray * @param split the split separator * @return the read txt method */ @Deprecated public static void writeNumpy(INDArray write, String filePath, String split) throws IOException { BufferedWriter writer = new BufferedWriter(new FileWriter(filePath)); for (int i = 0; i < write.rows(); i++) { StringBuilder sb = new StringBuilder(); INDArray row = write.getRow(i); for (int j = 0; j < row.columns(); j++) { sb.append(row.getDouble(j)); if (j < row.columns() - 1) // not the last element sb.append(split); } sb.append("\n"); writer.write(sb.toString()); } writer.flush(); writer.close(); }
/**
 * Rotates a matrix 90 degrees in place.
 *
 * <p>The method takes a transposed view of the input and reverses each row of that
 * view; the result is written back through {@code putRow}, so {@code toRotate} itself
 * is mutated and nothing is returned. (The previous Javadoc incorrectly documented a
 * return value for this {@code void} method.)
 * NOTE(review): this relies on {@code transpose()} returning a view backed by the same
 * data as {@code toRotate} — confirm for the ND4J version in use.
 *
 * @param toRotate the matrix to rotate; modified in place
 * @throws IllegalArgumentException if {@code toRotate} is not a matrix
 */
@Override
public void rot90(INDArray toRotate) {
    if (!toRotate.isMatrix())
        throw new IllegalArgumentException("Only rotating matrices");
    INDArray start = toRotate.transpose();
    for (int i = 0; i < start.rows(); i++)
        start.putRow(i, reverse(start.getRow(i)));
}
/**
 * Scales each row of the given matrix by that row's maximum value, in place.
 *
 * @param toScale the matrix whose rows are divided by their row maxima
 */
public static void scaleByMax(INDArray toScale) {
    // Reduction along dimension 1 yields one maximum per row.
    final INDArray rowMaxima = toScale.max(1);
    final int numRows = toScale.rows();
    for (int rowIdx = 0; rowIdx < numRows; rowIdx++) {
        double max = rowMaxima.getDouble(rowIdx);
        INDArray scaledRow = toScale.getRow(rowIdx).divi(max);
        toScale.putRow(rowIdx, scaledRow);
    }
}
// Row-gather: copy row list.get(i) of `in` into row i of `out`.
out.putRow(i, in.getRow(list.get(i)));
// View of the first row — shares its data with originalArray.
INDArray firstRow = originalArray.getRow(0);
// View of column index 4 — also shares data with originalArray.
INDArray lastColumn = originalArray.getColumn(4);
System.out.println();
INDArray firstRowDup = originalArray.getRow(0).dup(); //We now have a copy of the first row. i.e., firstRowDup is NOT a view of originalArray
// In-place add mutates only the duplicate; originalArray is left untouched.
firstRowDup.addi(100);
System.out.println("\n\n\n");
/**
 * Creates indices from a matrix or vector.
 *
 * <p>For a matrix, each row becomes one {@link NDArrayIndex} whose entries are the
 * row's values (truncated through {@code int}, matching the original behavior). For a
 * vector, a single index is built from all of its elements.
 *
 * @param index the matrix (one index per row) or vector to read indices from
 * @return the resulting array of indices
 * @throws ND4JArraySizeException   if the matrix has more rows than Integer.MAX_VALUE
 * @throws IllegalArgumentException if {@code index} is neither a matrix nor a vector
 */
public static INDArrayIndex[] create(INDArray index) {
    if (index.isMatrix()) {
        if (index.rows() > Integer.MAX_VALUE)
            throw new ND4JArraySizeException();
        NDArrayIndex[] ret = new NDArrayIndex[(int) index.rows()];
        for (int i = 0; i < index.rows(); i++) {
            INDArray row = index.getRow(i);
            // Reuse the row view already fetched above instead of calling
            // index.getRow(i) a second time as the previous version did.
            long[] nums = new long[(int) row.columns()];
            for (int j = 0; j < row.columns(); j++) {
                nums[j] = (int) row.getFloat(j);
            }
            ret[i] = new NDArrayIndex(nums);
        }
        return ret;
    } else if (index.isVector()) {
        long[] indices = NDArrayUtil.toLongs(index);
        return new NDArrayIndex[] {new NDArrayIndex(indices)};
    }
    throw new IllegalArgumentException("Passed in ndarray must be a matrix or a vector");
}
// Sentence length = number of tokens in the list stored as the pair's first element.
sentenceLength = ((List)((Pair)tokenizedSentences.get(i)).getFirst()).size();
if(sentenceLength >= maxLength) {
    // Sentence fills (or exceeds) the maximum length: mark every timestep present.
    featuresMask.getRow(i).assign(Double.valueOf(1.0D));
} else {
    // Shorter sentence: mark only the first `sentenceLength` timesteps of row i.
    featuresMask.get(new INDArrayIndex[]{NDArrayIndex.point(i), NDArrayIndex.interval(0, sentenceLength)}).assign(Double.valueOf(1.0D));
// Per-example views for row i of the output, label, and normalization tensors.
final INDArray locCfn = postOutput.getRow(i);
final long[] shape = locCfn.shape();
final INDArray locPositive = positive.getRow(i);
final INDArray locNegative = negative.getRow(i);
final Double locNormFactor = normFactor.getDouble(i);
if (mask != null) {
    // Reduce differences per label, zero out masked labels, then sum into the score row.
    final INDArray perLabel = classificationDifferences.sum(0);
    LossUtil.applyMask(perLabel, mask.getRow(i));
    perLabel.sum(scoreOutput.getRow(i), 0);
} else {
    // No mask: reduce over both dimensions directly into the score row.
    classificationDifferences.sum(scoreOutput.getRow(i), 0, 1);
    gradientOutput.getRow(i).assign(classificationDifferences.sum(0).addi(classificationDifferences.sum(1).transposei().negi()));
/**
 * Computes the (population) covariance matrix of a data set in which each row is one
 * record of N features, together with the per-feature mean vector. Each covariance
 * entry is the average of dx_i * dx_j over all mean-centered records.
 *
 * @param in a matrix of fixed-length feature vectors, one record per row
 * @return INDArray[2]: element 0 is the N x N covariance matrix, element 1 is the
 *         vector of per-feature averages
 */
public static INDArray[] covarianceMatrix(INDArray in) {
    final long numRecords = in.rows();
    final long numFeatures = in.columns();

    // Per-feature means: column sum divided by the record count.
    INDArray mean = Nd4j.create(numFeatures);
    for (int feature = 0; feature < numFeatures; feature++) {
        double columnMean = in.getColumn(feature).sumNumber().doubleValue() / numRecords;
        mean.getColumn(feature).assign(columnMean);
    }

    // Accumulate the outer product of each mean-centered record.
    INDArray covariance = Nd4j.create(numFeatures, numFeatures);
    for (int record = 0; record < numRecords; record++) {
        INDArray centered = in.getRow(record).sub(mean);
        covariance.addi(centered.reshape(numFeatures, 1).mmul(centered.reshape(1, numFeatures)));
    }
    covariance.divi(numRecords);

    return new INDArray[] {covariance, mean};
}
for(int i = 0; i < indices.rows(); i++) {
    if(i == 0) {
        // First index row: seed the result list with the slices it selects.
        INDArray row = indices.getRow(i);
        for(int j = 0; j < row.length(); j++) {
            arrList.add(slice(row.getInt(j)));
for(int i = 0; i < indices.rows(); i++) {
    if(i == 0) {
        // First index row: seed the result list with the slices it selects.
        INDArray row = indices.getRow(i);
        for(int j = 0; j < row.length(); j++) {
            arrList.add(slice(row.getInt(j)));
// Gather the slices selected by row i of the index matrix.
INDArray row = indices.getRow(i);
for(int j = 0; j < row.length(); j++) {
    INDArray slice = slice(row.getInt(j));
private List<RecognisedObject> predict(INDArray predictions) { List<RecognisedObject> objects = new ArrayList<>(); int[] topNPredictions = new int[topN]; float[] topNProb = new float[topN]; String outLabels[]=new String[topN]; //brute force collect top N int i = 0; for (int batch = 0; batch < predictions.size(0); batch++) { INDArray currentBatch = predictions.getRow(batch).dup(); while (i < topN) { topNPredictions[i] = Nd4j.argMax(currentBatch, 1).getInt(0, 0); topNProb[i] = currentBatch.getFloat(batch, topNPredictions[i]); currentBatch.putScalar(0, topNPredictions[i], 0); outLabels[i]= imageNetLabels.getLabel(topNPredictions[i]); objects.add(new RecognisedObject(outLabels[i], "eng", outLabels[i], topNProb[i])); i++; } } return objects; } }
/**
 * Looks up the embedding vector for a word.
 *
 * @param word the word to convert to a vector
 * @return the embedding row if the word is known, otherwise null
 */
public INDArray toVector(String word) {
    // Guard clause: unknown words have no embedding.
    if (!wordToId.containsKey(word)) {
        return null;
    }
    return embeddings.getRow(wordToId.get(word));
}
/**
 * Converts one row of the data matrix into a DoubleArray property value.
 *
 * @param propertyId id of the property (not used by this implementation)
 * @param data       matrix holding one row per node
 * @param nodeId     id of the node whose row is extracted
 * @return the node's row as a DoubleArray
 */
@Override
public Value toProperty(int propertyId, INDArray data, long nodeId) {
    INDArray row = data.getRow((int) nodeId);
    // Hoist the column count instead of re-querying size(1) on every iteration.
    int columns = (int) row.size(1);
    double[] rowAsDouble = new double[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        rowAsDouble[columnIndex] = row.getDouble(columnIndex);
    }
    return new DoubleArray(rowAsDouble);
}
}
/**
 * Divides each row of the matrix by that row's maximum value, in place.
 *
 * @param toScale the matrix to divide by its row maxes; modified in place
 */
public static void scaleByMax(INDArray toScale) {
    // Reduction along dimension 1 yields one maximum per row.
    INDArray scale = toScale.max(1);
    for (int i = 0; i < toScale.rows(); i++) {
        double scaleBy = scale.getDouble(i);
        // divi mutates the row view in place; putRow writes the result back.
        toScale.putRow(i, toScale.getRow(i).divi(scaleBy));
    }
}
/**
 * Runs in the context of an executor (Shard, Client, or Backup node): builds a
 * vector aggregation for one row of the stored array and sends it over the transport.
 */
@Override
public void processMessage() {
    short shardCount = (short) voidConfiguration.getNumberOfShards();
    // The row is duplicated so the outgoing message owns its data independently
    // of the backing storage.
    VectorAggregation aggregation = new VectorAggregation(rowIndex, shardCount, shardIndex,
            storage.getArray(key).getRow(rowIndex).dup());
    aggregation.setOriginatorId(this.getOriginatorId());
    transport.sendMessage(aggregation);
}
}
/**
 * This method will be started in context of executor, either Shard, Client or Backup node.
 * Builds a VectorAggregation for the row at {@code rowIndex} of the stored array
 * (duplicated, so the message owns its data) and sends it via the transport layer.
 */
@Override
public void processMessage() {
    VectorAggregation aggregation = new VectorAggregation(rowIndex,
            (short) voidConfiguration.getNumberOfShards(), shardIndex,
            storage.getArray(key).getRow(rowIndex).dup());
    aggregation.setOriginatorId(this.getOriginatorId());
    transport.sendMessage(aggregation);
}
}