// NOTE(review): fragment — the enclosing method is not visible in this chunk
// (the opening brace below is closed elsewhere).
// DropConnect: during training, when enabled and a dropout rate is configured,
// replace the weight matrix with a randomly masked copy keyed by the CDAE
// weight parameter. The dropOut > 0 guard avoids a pointless masked copy when
// no rate is set.
if (conf.isUseDropConnect() && training && conf.getLayer().getDropOut() > 0) {
    W = Dropout.applyDropConnect(this, CDAEParamInitializer.WEIGHT_KEY);
/**
 * Computes the linear pre-activation {@code v * W + hBias} for this layer.
 *
 * @param v        input activations, one example per row
 * @param training true during training; enables DropConnect on the weights
 * @return the pre-activation matrix
 */
public INDArray preOutput(INDArray v, boolean training) {
    INDArray weights = getParam(DefaultParamInitializer.WEIGHT_KEY);
    // DropConnect swaps in a randomly masked copy of the weights while training.
    boolean useDropConnect = training && conf.isUseDropConnect() && conf.getLayer().getDropOut() > 0;
    if (useDropConnect) {
        weights = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY);
    }
    INDArray hiddenBias = getParam(PretrainParamInitializer.BIAS_KEY);
    return v.mmul(weights).addiRowVector(hiddenBias);
}
public INDArray encode(INDArray v, boolean training) { INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY); if (training && conf.isUseDropConnect() && conf.getLayer().getDropOut() > 0) { W = Dropout.applyDropConnect(this, PretrainParamInitializer.WEIGHT_KEY); } INDArray hBias = getParam(PretrainParamInitializer.BIAS_KEY); INDArray preAct = v.mmul(W).addiRowVector(hBias); //INDArray ret = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf.getLayer().getActivationFunction(), preAct)); INDArray ret = layerConf().getActivationFn().getActivation(preAct, training); return ret; }
@Override public INDArray activate(boolean training) { if (input.rank() != 3) throw new UnsupportedOperationException( "Input must be rank 3. Got input with rank " + input.rank() + " " + layerId()); INDArray b = getParam(DefaultParamInitializer.BIAS_KEY); INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY); if (conf.isUseDropConnect() && training) { W = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY); } INDArray input2d = TimeSeriesUtils.reshape3dTo2d(input); //INDArray act2d = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf.getLayer().getActivationFunction(), // input2d.mmul(W).addiRowVector(b))); INDArray act2d = layerConf().getActivationFn().getActivation(input2d.mmul(W).addiRowVector(b), training); if (maskArray != null) { act2d.muliColumnVector(maskArray); } return TimeSeriesUtils.reshape2dTo3d(act2d, input.size(0)); }
public INDArray preOutput(boolean training) { applyDropOutIfNecessary(training); INDArray b = getParam(DefaultParamInitializer.BIAS_KEY); INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY); //Input validation: if (input.rank() != 2 || input.columns() != W.rows()) { if (input.rank() != 2) { throw new DL4JInvalidInputException("Input that is not a matrix; expected matrix (rank 2), got rank " + input.rank() + " array with shape " + Arrays.toString(input.shape()) + ". Missing preprocessor or wrong input type? " + layerId()); } throw new DL4JInvalidInputException( "Input size (" + input.columns() + " columns; shape = " + Arrays.toString(input.shape()) + ") is invalid: does not match layer input size (layer # inputs = " + W.size(0) + ") " + layerId()); } if (conf.isUseDropConnect() && training && layerConf().getDropOut() > 0) { W = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY); } INDArray ret = input.mmul(W).addiRowVector(b); if (maskArray != null) { applyMask(ret); } return ret; }
/**
 * Applies input dropout to {@code input}, in place on a private copy, at most
 * once per forward pass (tracked via {@code dropoutApplied}).
 *
 * @param training true during training; dropout is never applied at inference
 */
protected void applyDropOutIfNecessary(boolean training) {
    // Apply only when: a dropout rate is configured, DropConnect is NOT in use
    // (input dropout and DropConnect are alternatives here), we are training,
    // and dropout has not already been applied this pass.
    if (layerConf().getDropOut() > 0 && !conf.isUseDropConnect() && training && !dropoutApplied) {
        if (Nd4j.getWorkspaceManager().checkIfWorkspaceExists(ComputationGraph.workspaceExternal)) {
            // Duplicate the input while the external workspace scope is
            // borrowed, so the copy is allocated in workspace memory.
            // unsafeDuplication is used for non-view arrays — presumably a
            // faster copy that skips safety checks; confirm against ND4J docs.
            try (MemoryWorkspace ws = Nd4j.getWorkspaceManager()
                            .getWorkspaceForCurrentThread(ComputationGraph.workspaceExternal)
                            .notifyScopeBorrowed()) {
                input = input.isView() ? input.dup() : input.unsafeDuplication();
            }
        } else
            input = input.isView() ? input.dup() : input.unsafeDuplication();
        // NOTE(review): applyDropout appears to mutate its argument in place —
        // the duplication above protects the caller's original array. Confirm.
        Dropout.applyDropout(input, layerConf().getDropOut());
        // Mark as done so repeated preOutput/activate calls in one pass don't
        // drop the input twice.
        dropoutApplied = true;
    }
}
// NOTE(review): fragment — the enclosing method is not visible in this chunk
// (the opening brace below is closed elsewhere).
// Fetch the convolution kernel weights and bias, then optionally apply
// DropConnect to the weights during training when a dropout rate is set.
INDArray weights = getParam(ConvolutionParamInitializer.WEIGHT_KEY);
INDArray bias = getParam(ConvolutionParamInitializer.BIAS_KEY);
if (conf.isUseDropConnect() && training && conf.getLayer().getDropOut() > 0) {
    weights = Dropout.applyDropConnect(this, ConvolutionParamInitializer.WEIGHT_KEY);
// NOTE(review): fragment — this line begins mid-argument-list; the statement
// it completes (updater configuration) is not visible in this chunk.
adamMeanDecay, adamVarDecay, rho, rmsDecay, epsilon);
// Layer-level DropConnect override wins when set; otherwise fall back to the
// NeuralNetConfiguration-level default.
boolean useDropCon = (useDropConnect == null ? nnc.isUseDropConnect() : useDropConnect);
LayerValidation.generalValidation(l.getLayerName(), l, nnc.isUseRegularization(), useDropCon, dropOut, l2,
                l2Bias, l1, l1Bias, dist);
// NOTE(review): fragment — the enclosing method is not visible in this chunk
// (the opening brace below is closed elsewhere).
// DropConnect on the recurrent layer's input weights during training, keyed by
// the caller-supplied parameter key.
if (conf.isUseDropConnect() && training && conf.getLayer().getDropOut() > 0) {
    inputWeights = Dropout.applyDropConnect(layer, inputWeightKey);