// NOTE(review): incomplete fragment of legacy updater-dispatch code — appears to be
// statements extracted from a switch/if-chain over the Updater enum, with the case
// boundaries lost in this view. The locals `n`, `r`, `a` presumably are the
// Nesterovs/RmsProp/Adam-family config objects receiving their hyperparameters
// (momentum schedule, rmsDecay, beta2/adamVarDecay) — TODO confirm against the full
// source before editing; the trailing `} else { layer.setUpdater(null);` is cut off.
layer.setUpdater(Updater.SGD); layer.setUpdater(Updater.ADAM); layer.setUpdater(Updater.ADADELTA); n.setMomentumSchedule(momentumSchedule); layer.setUpdater(Updater.NESTEROVS); layer.setUpdater(Updater.ADAGRAD); r.setRmsDecay(rmsDecay); layer.setUpdater(Updater.RMSPROP); a.setBeta2(adamVarDecay); layer.setUpdater(Updater.ADAMAX); layer.setUpdater(Updater.NONE); } else { layer.setUpdater(null);
// NOTE(review): fragment — enclosing method is not visible here. Propagates global
// defaults onto a layer (`bLayer`): bias init unconditionally, then the legacy
// Updater and the newer IUpdater only when the layer has not set its own
// (null getter result is treated as "unset"). The IUpdater is cloned, presumably so
// layers do not share mutable updater state — TODO confirm; the `if` body is cut off.
bLayer.setBiasInit(biasInit); if (bLayer.getUpdater() == null) bLayer.setUpdater(updater); if (bLayer.getIUpdater() == null) { bLayer.setIUpdater(iUpdater.clone());
/** * Reset the learning related configs of the layer to default. When instantiated with a global neural network configuration * the parameters specified in the neural network configuration will be used. * For internal use with the transfer learning API. Users should not have to call this method directly. */ public void resetLayerDefaultConfig() { //clear the learning related params for all layers in the origConf and set to defaults this.setUpdater(null); this.setIUpdater(null); this.setMomentum(Double.NaN); this.setWeightInit(null); this.setBiasInit(Double.NaN); this.setDist(null); this.setLearningRate(Double.NaN); this.setBiasLearningRate(Double.NaN); this.setLearningRateSchedule(null); this.setMomentumSchedule(null); this.setL1(Double.NaN); this.setL2(Double.NaN); this.setRho(Double.NaN); this.setEpsilon(Double.NaN); this.setRmsDecay(Double.NaN); this.setAdamMeanDecay(Double.NaN); this.setAdamVarDecay(Double.NaN); this.setGradientNormalization(GradientNormalization.None); this.setGradientNormalizationThreshold(1.0); }
// NOTE(review): fragment — enclosing method is not visible here. Applies global
// settings onto a layer (`bl`): L2-bias penalty unconditionally, then the legacy
// Updater and new-style IUpdater only when the GLOBAL value is non-null, i.e. a set
// global value overwrites whatever the layer had. That guard direction is the
// opposite of code elsewhere in this file that checks the LAYER-side getters for
// null before overwriting — verify this asymmetry is intentional.
bl.setL2Bias(l2Bias); if (updater != null) bl.setUpdater(updater); if (iUpdater != null) bl.setIUpdater(iUpdater);