/**
 * Returns a copy of this array with the given row vector added to every row.
 * <p>
 * NOTE(review): despite the previous comment, this is NOT in place and the argument is a
 * row vector, not a column vector — the array is duplicated ({@code dup()}) and the
 * in-place {@code addiRowVector} is applied to the copy only.
 *
 * @param rowVector the row vector to add to each row
 * @return a new array holding the result; this array is left unmodified
 */
@Override public INDArray addRowVector(INDArray rowVector) { return dup().addiRowVector(rowVector); }
/**
 * Maps records expressed in the principal-component basis back into the original
 * feature space; columns for dropped components are implicitly filled with zeroes.
 *
 * @param data the records as principal components (same features used to construct
 *             this PCA object)
 * @return the records in terms of the original features
 */
public INDArray convertBackToFeatures(INDArray data) {
    // Contract the component dimension of the eigenvector matrix against the
    // component dimension of the data...
    INDArray reconstructed = Nd4j.tensorMmul(eigenvectors, data, new int[][] {{1}, {1}});
    // ...then flip to row-per-record orientation and restore the feature means.
    return reconstructed.transposei().addiRowVector(mean);
}
/**
 * Generates a set of <i>count</i> random Gaussian samples with the same mean and
 * eigenvector/eigenvalue structure as the data set used to initialize the PCA object,
 * with the same number of features <i>N</i>.
 *
 * @param count the number of samples to generate
 * @return a matrix of size <i>count</i> rows by <i>N</i> columns
 */
public INDArray generateGaussianSamples(long count) {
    INDArray samples = Nd4j.randn(new long[] {count, eigenvalues.columns()});
    // Scale each standard-normal component by sqrt(eigenvalue) so the samples reproduce
    // the per-component variance of the original data. The previous code used
    // pow(eigenvalues, -0.5), i.e. 1/sqrt(lambda), which inverts the variances
    // (shrinking high-variance directions and inflating low-variance ones).
    INDArray factors = Transforms.pow(eigenvalues, 0.5, true);
    samples.muliRowVector(factors);
    // Rotate from component space back to feature space and re-add the feature means.
    return Nd4j.tensorMmul(eigenvectors, samples, new int[][] {{1}, {1}}).transposei().addiRowVector(mean);
}
// Merge this batch into the running variance (parallel variance combination):
//   combined = (runningVariance * runningCount + mB
//               + deltaSquared * (runningCount * count) / (runningCount + count))
//             / (runningCount + count)
// NOTE(review): assumes mB holds the batch's accumulated squared deviations and
// deltaSquared the squared difference of means — confirm against the enclosing method.
runningVariance.muli(runningCount).addiRowVector(mB)
                .addiRowVector(deltaSquared
                                .muli((float) (runningCount * count) / (runningCount + count)))
                .divi(runningCount + count);
/**
 * Denormalize a data array
 *
 * @param array the data to denormalize
 * @param stats statistics of the data population
 */
@Override
public void revert(INDArray array, INDArray maskArray, DistributionStats stats) {
    INDArray std = filteredStd(stats);
    INDArray mean = stats.getMean();
    if (array.rank() > 2) {
        // Higher-rank input: broadcast std/mean along dimension 1 (features).
        Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(array, std, array, 1));
        Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(array, mean, array, 1));
    } else {
        // Rank <= 2: simple in-place row-vector ops suffice.
        array.muliRowVector(std);
        array.addiRowVector(mean);
    }
    if (maskArray != null) {
        DataSetUtil.setMaskedValuesToZero(array, maskArray);
    }
}
/** * Denormalize a data array * * @param array the data to denormalize * @param stats statistics of the data population */ @Override public void revert(INDArray array, INDArray maskArray, MinMaxStats stats) { // Subtract target range minimum value array.subi(minRange); // Scale by target range array.divi(maxRange - minRange); if (array.rank() <= 2) { array.muliRowVector(stats.getRange()); array.addiRowVector(stats.getLower()); } else { Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(array, stats.getRange(), array, 1)); Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(array, stats.getLower(), array, 1)); } if (maskArray != null) { DataSetUtil.setMaskedValuesToZero(array, maskArray); } }
// Affine pre-activation: project input through W, add U element-wise, then
// broadcast-add the bias b across rows.
// NOTE(review): addi(U) mutates the fresh mmul result in place; confirm in the caller
// that U's shape matches input.mmul(W).
INDArray ret = input.mmul(W).addi(U).addiRowVector(b);
/**
 * Returns a copy of this array with the given row vector added to every row.
 * <p>
 * NOTE(review): despite the previous comment, this is NOT in place and the argument is a
 * row vector, not a column vector — the array is duplicated ({@code dup()}) and the
 * in-place {@code addiRowVector} is applied to the copy only.
 *
 * @param rowVector the row vector to add to each row
 * @return a new array holding the result; this array is left unmodified
 */
@Override public INDArray addRowVector(INDArray rowVector) { return dup().addiRowVector(rowVector); }
/** * Take the data that has been transformed to the principal components about the mean and * transform it back into the original feature set. Make sure to fill in zeroes in columns * where components were dropped! * @param data Data of the same features used to construct the PCA object but as the components * @return The records in terms of the original features */ public INDArray convertBackToFeatures(INDArray data) { return Nd4j.tensorMmul(eigenvectors, data, new int[][] {{1}, {1}}).transposei().addiRowVector(mean); }
/**
 * Runs the decoder network on the given latent-space values and returns the final
 * pre-activation of the reconstruction distribution p(x|z).
 *
 * @param latentSpaceValues latent values, one row per example; the column count must
 *                          match this layer's latent size
 * @return output of the final pxzw/pxzb projection (no activation applied to it here)
 * @throws IllegalArgumentException if the latent size does not match this layer
 */
private INDArray decodeGivenLatentSpaceValues(INDArray latentSpaceValues) {
    // Validate latent dimensionality against the p(z|x) mean weight matrix.
    if (latentSpaceValues.size(1) != params.get(VariationalAutoencoderParamInitializer.PZX_MEAN_W).size(1)) {
        throw new IllegalArgumentException("Invalid latent space values: expected size "
                        + params.get(VariationalAutoencoderParamInitializer.PZX_MEAN_W).size(1)
                        + ", got size (dimension 1) = " + latentSpaceValues.size(1) + " " + layerId());
    }

    //Do forward pass through decoder
    int nDecoderLayers = decoderLayerSizes.length;
    INDArray currentActivations = latentSpaceValues;
    IActivation afn = layerConf().getActivationFn();
    for (int i = 0; i < nDecoderLayers; i++) {
        // Decoder layer i parameters are keyed "d<i>" + weight/bias suffix.
        String wKey = "d" + i + WEIGHT_KEY_SUFFIX;
        String bKey = "d" + i + BIAS_KEY_SUFFIX;
        INDArray w = params.get(wKey);
        INDArray b = params.get(bKey);
        // Affine transform, then activation. The return value of getActivation is
        // discarded — presumably it modifies currentActivations in place; TODO confirm
        // that holds for all IActivation implementations used here.
        currentActivations = currentActivations.mmul(w).addiRowVector(b);
        afn.getActivation(currentActivations, false);
    }

    // Final projection to the reconstruction distribution parameters.
    INDArray pxzw = params.get(VariationalAutoencoderParamInitializer.PXZ_W);
    INDArray pxzb = params.get(VariationalAutoencoderParamInitializer.PXZ_B);
    return currentActivations.mmul(pxzw).addiRowVector(pxzb);
}
/**
 * Computes the mean activation as the linear pre-output of the current input:
 * input * W + b, with the bias broadcast across rows.
 *
 * @return the linear pre-activation, one row per example
 */
@Override
public INDArray activationMean() {
    INDArray bias = getParam(DefaultParamInitializer.BIAS_KEY);
    INDArray weights = getParam(DefaultParamInitializer.WEIGHT_KEY);
    INDArray linear = input().mmul(weights);
    return linear.addiRowVector(bias);
}
/**
 * Generates a set of <i>count</i> random Gaussian samples with the same mean and
 * eigenvector/eigenvalue structure as the data set used to initialize the PCA object,
 * with the same number of features <i>N</i>.
 *
 * @param count the number of samples to generate
 * @return a matrix of size <i>count</i> rows by <i>N</i> columns
 */
public INDArray generateGaussianSamples(int count) {
    INDArray samples = Nd4j.randn(count, eigenvalues.columns());
    // Scale each standard-normal component by sqrt(eigenvalue) so the samples reproduce
    // the per-component variance of the original data. The previous code used
    // pow(eigenvalues, -0.5), i.e. 1/sqrt(lambda), which inverts the variances.
    INDArray factors = Transforms.pow(eigenvalues, 0.5, true);
    samples.muliRowVector(factors);
    // Rotate from component space back to feature space and re-add the feature means.
    return Nd4j.tensorMmul(eigenvectors, samples, new int[][] {{1}, {1}}).transposei().addiRowVector(mean);
}
public INDArray decode(INDArray y) { INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY); INDArray vBias = getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY); INDArray preAct = y.mmul(W.transposei()).addiRowVector(vBias); //return Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf.getLayer().getActivationFunction(), preAct)); return layerConf().getActivationFn().getActivation(preAct, true); }
/**
 * Computes the hidden pre-activation for visible input {@code v}: v * W + hBias,
 * with DropConnect applied to the weights during training when configured.
 *
 * @param v        visible-layer input, one row per example
 * @param training whether this is a training pass (enables DropConnect)
 * @return the hidden-unit pre-activation
 */
public INDArray preOutput(INDArray v, boolean training) {
    INDArray hiddenBias = getParam(PretrainParamInitializer.BIAS_KEY);
    INDArray weights = getParam(DefaultParamInitializer.WEIGHT_KEY);
    boolean useDropConnect = training && conf.isUseDropConnect() && conf.getLayer().getDropOut() > 0;
    if (useDropConnect) {
        weights = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY);
    }
    return v.mmul(weights).addiRowVector(hiddenBias);
}
@Override public INDArray preOutput(boolean training) { if (input.columns() != 1) { //Assume shape is [numExamples,1], and each entry is an integer index throw new DL4JInvalidInputException( "Cannot do forward pass for embedding layer with input more than one column. " + "Expected input shape: [numExamples,1] with each entry being an integer index " + layerId()); } int[] indexes = new int[input.length()]; for (int i = 0; i < indexes.length; i++) indexes[i] = input.getInt(i, 0); INDArray weights = getParam(DefaultParamInitializer.WEIGHT_KEY); INDArray bias = getParam(DefaultParamInitializer.BIAS_KEY); INDArray rows = Nd4j.pullRows(weights, 1, indexes); rows.addiRowVector(bias); return rows; }
public INDArray encode(INDArray v, boolean training) { INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY); if (training && conf.isUseDropConnect() && conf.getLayer().getDropOut() > 0) { W = Dropout.applyDropConnect(this, PretrainParamInitializer.WEIGHT_KEY); } INDArray hBias = getParam(PretrainParamInitializer.BIAS_KEY); INDArray preAct = v.mmul(W).addiRowVector(hBias); //INDArray ret = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf.getLayer().getActivationFunction(), preAct)); INDArray ret = layerConf().getActivationFn().getActivation(preAct, training); return ret; }
/**
 * Forward pass for rank-3 (time series) input: flattens the input to 2d, applies the
 * dense transform and activation, applies the time-step mask if present, and reshapes
 * the result back to 3d.
 *
 * @param training whether this is a training pass (enables DropConnect on W)
 * @return the rank-3 activations, same minibatch size as the input
 * @throws UnsupportedOperationException if the input is not rank 3
 */
@Override
public INDArray activate(boolean training) {
    if (input.rank() != 3)
        throw new UnsupportedOperationException(
                        "Input must be rank 3. Got input with rank " + input.rank() + " " + layerId());
    INDArray b = getParam(DefaultParamInitializer.BIAS_KEY);
    INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY);
    if (conf.isUseDropConnect() && training) {
        W = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY);
    }

    // Collapse the time dimension into rows so the dense op is a plain 2d mmul.
    INDArray input2d = TimeSeriesUtils.reshape3dTo2d(input);

    INDArray act2d = layerConf().getActivationFn().getActivation(input2d.mmul(W).addiRowVector(b), training);
    if (maskArray != null) {
        // Zero out activations for masked (padding) time steps.
        act2d.muliColumnVector(maskArray);
    }
    return TimeSeriesUtils.reshape2dTo3d(act2d, input.size(0));
}
/**
 * Denormalize a data array
 *
 * @param array the data to denormalize
 * @param stats statistics of the data population
 */
@Override
public void revert(INDArray array, INDArray maskArray, DistributionStats stats) {
    INDArray std = filteredStd(stats);
    INDArray mean = stats.getMean();
    if (array.rank() > 2) {
        // Higher-rank input: broadcast std/mean along dimension 1 (features).
        Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(array, std, array, 1));
        Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(array, mean, array, 1));
    } else {
        // Rank <= 2: simple in-place row-vector ops suffice.
        array.muliRowVector(std);
        array.addiRowVector(mean);
    }
    if (maskArray != null) {
        DataSetUtil.setMaskedValuesToZero(array, maskArray);
    }
}
/**
 * Computes the layer pre-output (pre-activation): input * W + b, after applying
 * dropout to the input and, during training when configured, DropConnect to W.
 * If a mask array is set, it is applied to the result.
 *
 * @param training whether this is a training pass (enables dropout/DropConnect)
 * @return the pre-activation, one row per example
 * @throws DL4JInvalidInputException if the input is not rank 2 or its column count
 *                                   does not match the layer's input size
 */
public INDArray preOutput(boolean training) {
    applyDropOutIfNecessary(training);
    INDArray b = getParam(DefaultParamInitializer.BIAS_KEY);
    INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY);

    //Input validation:
    if (input.rank() != 2 || input.columns() != W.rows()) {
        if (input.rank() != 2) {
            throw new DL4JInvalidInputException("Input that is not a matrix; expected matrix (rank 2), got rank "
                            + input.rank() + " array with shape " + Arrays.toString(input.shape())
                            + ". Missing preprocessor or wrong input type? " + layerId());
        }
        throw new DL4JInvalidInputException(
                        "Input size (" + input.columns() + " columns; shape = " + Arrays.toString(input.shape())
                                        + ") is invalid: does not match layer input size (layer # inputs = "
                                        + W.size(0) + ") " + layerId());
    }

    if (conf.isUseDropConnect() && training && layerConf().getDropOut() > 0) {
        // DropConnect returns a masked copy of W; the stored parameter is untouched.
        W = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY);
    }

    INDArray ret = input.mmul(W).addiRowVector(b);

    if (maskArray != null) {
        applyMask(ret);
    }

    return ret;
}
/** * Denormalize a data array * * @param array the data to denormalize * @param stats statistics of the data population */ @Override public void revert(INDArray array, INDArray maskArray, MinMaxStats stats) { // Subtract target range minimum value array.subi(minRange); // Scale by target range array.divi(maxRange - minRange); if (array.rank() <= 2) { array.muliRowVector(stats.getRange()); array.addiRowVector(stats.getLower()); } else { Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(array, stats.getRange(), array, 1)); Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(array, stats.getLower(), array, 1)); } if (maskArray != null) { DataSetUtil.setMaskedValuesToZero(array, maskArray); } }