/**
 * Appends a feature column for each example onto the current feature matrix.
 *
 * @param toAdd the feature vector to append horizontally
 */
@Override
public void addFeatureVector(INDArray toAdd) {
    INDArray widened = Nd4j.hstack(getFeatureMatrix(), toAdd);
    setFeatures(widened);
}
// Concatenate the two arrays side-by-side (column-wise) and print the result.
// Assumes `ones` and `zeros` have compatible row counts — TODO confirm shapes
// at the point where they are created (outside this snippet).
INDArray hstack = Nd4j.hstack(ones,zeros);
System.out.println("### HSTACK ####");
System.out.println(hstack);
INDArray hStack = Nd4j.hstack(rowVector1, rowVector2); //Horizontal stack: [1,3]+[1,3] to [1,6]
// NOTE(review): only `vStack` (defined outside this snippet) is printed below;
// the `hStack` computed above is not printed here — verify that is intentional
// and that an hStack print statement was not lost.
System.out.println("\n\n\nCreating INDArrays from other INDArrays, using hstack and vstack:");
System.out.println("vStack:\n" + vStack);
private INDArray constructParams() { //some params will be null for subsampling etc INDArray keepView = null; for (INDArray aParam : editedParams) { if (aParam != null) { if (keepView == null) { keepView = aParam; } else { keepView = Nd4j.hstack(keepView, aParam); } } } if (!appendParams.isEmpty()) { INDArray appendView = Nd4j.hstack(appendParams); return Nd4j.hstack(keepView, appendView); } else { return keepView; } }
/**
 * Returns the aggregated result: the single chunk when no horizontal
 * aggregation is needed, otherwise all chunks stacked side-by-side.
 */
@Override
public INDArray getAccumulatedResult() {
    return aggregationWidth == 1
            ? chunks.get((short) 0)
            : Nd4j.hstack(chunks.values());
}
/**
 * Assembles the accumulated result from the received chunks. With an
 * aggregation width of 1 the single chunk is returned as-is; otherwise the
 * chunks are concatenated horizontally.
 */
@Override
public INDArray getAccumulatedResult() {
    if (aggregationWidth != 1) {
        return Nd4j.hstack(chunks.values());
    }
    return chunks.get((short) 0);
}
/**
 * Horizontally concatenates {@code toAdd} onto the existing feature matrix,
 * adding one feature per example.
 *
 * @param toAdd the feature vector to add
 */
@Override
public void addFeatureVector(INDArray toAdd) {
    final INDArray current = getFeatureMatrix();
    setFeatures(Nd4j.hstack(current, toAdd));
}
/**
 * Generates a held-out test set of N_SAMPLES/10 examples: two inputs drawn
 * uniformly from [MIN_RANGE, MAX_RANGE] and a regression target equal to
 * their sum.
 *
 * @return the test examples wrapped as a FederatedDataSet
 */
@Override
public FederatedDataSet getTestData() {
    final Random rng = new Random(seed);
    final int n = N_SAMPLES / 10;
    final double[] targets = new double[n];
    final double[] a = new double[n];
    final double[] b = new double[n];
    final double span = MAX_RANGE - MIN_RANGE;
    for (int i = 0; i < n; i++) {
        // Draw order matters for reproducibility with the seeded RNG:
        // first input, second input, then the derived target.
        a[i] = MIN_RANGE + span * rng.nextDouble();
        b[i] = MIN_RANGE + span * rng.nextDouble();
        targets[i] = a[i] + b[i];
    }
    final INDArray colA = Nd4j.create(a, new int[]{n, 1});
    final INDArray colB = Nd4j.create(b, new int[]{n, 1});
    // Two input columns side-by-side: shape [n, 2].
    final INDArray features = Nd4j.hstack(colA, colB);
    final INDArray labels = Nd4j.create(targets, new int[]{n, 1});
    return new FederatedDataSetImpl(new DataSet(features, labels));
}
/**
 * Generates the training set: N_SAMPLES examples, each with two inputs drawn
 * uniformly from [MIN_RANGE, MAX_RANGE] and a label equal to their sum, then
 * shuffles the example order.
 *
 * @return the shuffled training examples wrapped as a FederatedDataSet
 */
@Override
public FederatedDataSet getTrainingData() {
    final Random rng = new Random(seed);
    final double[] labelValues = new double[N_SAMPLES];
    final double[] first = new double[N_SAMPLES];
    final double[] second = new double[N_SAMPLES];
    for (int i = 0; i < N_SAMPLES; i++) {
        // Draw order matters for reproducibility with the seeded RNG.
        first[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rng.nextDouble();
        second[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rng.nextDouble();
        labelValues[i] = first[i] + second[i];
    }
    // Two input columns side-by-side: shape [N_SAMPLES, 2].
    INDArray features = Nd4j.hstack(
            Nd4j.create(first, new int[]{N_SAMPLES, 1}),
            Nd4j.create(second, new int[]{N_SAMPLES, 1}));
    INDArray labels = Nd4j.create(labelValues, new int[]{N_SAMPLES, 1});
    DataSet dataSet = new DataSet(features, labels);
    // Randomize example order before handing the data to training.
    dataSet.shuffle();
    return new FederatedDataSetImpl(dataSet);
}
// Concatenate this node's partial feature arrays into a single row vector…
final INDArray nodeFeatures = Nd4j.hstack(arrays);
// …and store it as the row for this node in the embedding matrix.
embedding.putRow(nodeId, nodeFeatures);
/**
 * Builds a training iterator for the sum-regression task: each example has
 * two inputs drawn uniformly from [MIN_RANGE, MAX_RANGE] and a label equal
 * to their sum.
 *
 * @param batchSize number of examples per minibatch
 * @param rand      source of randomness for the input values
 * @return an iterator over the generated examples
 */
private static DataSetIterator getTrainingData(int batchSize, Random rand) {
    double[] sum = new double[nSamples];
    double[] input1 = new double[nSamples];
    double[] input2 = new double[nSamples];
    for (int i = 0; i < nSamples; i++) {
        input1[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
        input2[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
        sum[i] = input1[i] + input2[i];
    }
    INDArray inputNDArray1 = Nd4j.create(input1, new int[]{nSamples, 1});
    INDArray inputNDArray2 = Nd4j.create(input2, new int[]{nSamples, 1});
    // Two input columns side-by-side: shape [nSamples, 2].
    INDArray inputNDArray = Nd4j.hstack(inputNDArray1, inputNDArray2);
    INDArray outPut = Nd4j.create(sum, new int[]{nSamples, 1});
    DataSet dataSet = new DataSet(inputNDArray, outPut);
    List<DataSet> listDs = dataSet.asList();
    // Fix: use the diamond operator instead of the raw ListDataSetIterator type.
    return new ListDataSetIterator<>(listDs, batchSize);
}
}
/**
 * Prunes redundant feature columns after combining the previous and current
 * embeddings: related features are grouped via connected components of a
 * feature graph, and one representative feature per component is kept.
 *
 * @param prevEmbedding the embedding produced by the previous iteration
 * @param embedding     the newly computed embedding
 * @return a new Embedding containing only the kept features and their columns
 */
public Embedding prune(Embedding prevEmbedding, Embedding embedding) {
    // Combine both embeddings column-wise so pruning can work across them.
    INDArray embeddingToPrune = Nd4j.hstack(prevEmbedding.getNDEmbedding(), embedding.getNDEmbedding());
    Feature[] featuresToPrune = ArrayUtils.addAll(prevEmbedding.getFeatures(), embedding.getFeatures());
    progressLogger.log("Feature Pruning: Creating features graph");
    // NOTE(review): presumably edges connect features considered equivalent —
    // confirm against loadFeaturesGraph, which is defined outside this snippet.
    final Graph graph = loadFeaturesGraph(embeddingToPrune, prevEmbedding.features.length);
    progressLogger.log("Feature Pruning: Created features graph");
    progressLogger.log("Feature Pruning: Finding features to keep");
    // Keep one representative per connected component: the smallest nodeId in
    // each component (groups are non-empty by construction, so getAsInt is safe).
    int[] featureIdsToKeep = findConnectedComponents(graph)
            .collect(Collectors.groupingBy(item -> item.setId))
            .values()
            .stream()
            .mapToInt(results -> results.stream().mapToInt(value -> (int) value.nodeId).min().getAsInt())
            .toArray();
    progressLogger.log("Feature Pruning: Found features to keep");
    progressLogger.log("Feature Pruning: Pruning embeddings");
    INDArray prunedNDEmbedding = pruneEmbedding(embeddingToPrune, featureIdsToKeep);
    progressLogger.log("Feature Pruning: Pruned embeddings");
    // Select the Feature metadata matching the kept embedding columns.
    Feature[] prunedFeatures = new Feature[featureIdsToKeep.length];
    for (int index = 0; index < featureIdsToKeep.length; index++) {
        prunedFeatures[index] = featuresToPrune[featureIdsToKeep[index]];
    }
    return new Embedding(prunedFeatures, prunedNDEmbedding);
}
/**
 * For each feature column, multiplies the transposed adjacency matrix by that
 * column and takes the max over axis 0, then stacks the resulting columns
 * horizontally — a per-feature max aggregation over the graph structure.
 *
 * @param features        node feature matrix
 * @param adjacencyMatrix adjacency matrix of the graph
 * @return the max-aggregated matrix, one output column per input feature
 */
@Override
public INDArray ndOp(INDArray features, INDArray adjacencyMatrix) {
    final int nCols = features.columns();
    final INDArray[] aggregated = new INDArray[nCols];
    for (int col = 0; col < nCols; col++) {
        INDArray weighted = adjacencyMatrix.transpose().mulColumnVector(features.getColumn(col));
        aggregated[col] = weighted.max(0).transpose();
    }
    return Nd4j.hstack(aggregated);
}
f = Nd4j.hstack(f1, f2); // concatenate f1 and f2 horizontally (column-wise)
} else {
f = Nd4j.hstack(f1, f2); // join f1 and f2 side-by-side into a single array
} else {