// Apply the new learning rate to this parameter; if the layer uses a Nesterov
// updater, also apply the new momentum
vs.getLayer().conf().setLearningRateByParam(vs.getParamName(), newLr);
if (((BaseLayer) vs.getLayer().conf().getLayer()).getIUpdater() instanceof Nesterovs) {
    ((BaseLayer) vs.getLayer().conf().getLayer()).setMomentum(newMomentum);
}
case NESTEROVS:
    // Resolve momentum: keep a layer-level value if set, otherwise inherit the
    // global value, otherwise fall back to the Nesterov default
    if (Double.isNaN(momentum) && Double.isNaN(layer.getMomentum())) {
        layer.setMomentum(Nesterovs.DEFAULT_NESTEROV_MOMENTUM);
    } else if (Double.isNaN(layer.getMomentum())) {
        layer.setMomentum(momentum);
    }
    if (momentumSchedule != null && layer.getMomentumSchedule() == null) {
        layer.setMomentumSchedule(momentumSchedule);
    }
    break;
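// Illustrative helper, not DL4J API: the NaN-sentinel precedence applied in the
// NESTEROVS case above, written out on its own. A layer-level value wins, then
// the global configuration's value, then the updater's built-in default.
static double resolveMomentum(double layerMomentum, double globalMomentum) {
    if (!Double.isNaN(layerMomentum))
        return layerMomentum;                       // explicit per-layer override
    if (!Double.isNaN(globalMomentum))
        return globalMomentum;                      // inherited from the global config
    return Nesterovs.DEFAULT_NESTEROV_MOMENTUM;     // neither set: updater default
}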
/**
 * Reset the learning-related configuration of the layer to its defaults. When instantiated with a global
 * neural network configuration, the parameters specified in the neural network configuration will be used.
 * For internal use with the transfer learning API. Users should not have to call this method directly.
 */
public void resetLayerDefaultConfig() {
    //clear the learning related params for all layers in the origConf and set to defaults
    this.setUpdater(null);
    this.setIUpdater(null);
    this.setMomentum(Double.NaN);
    this.setWeightInit(null);
    this.setBiasInit(Double.NaN);
    this.setDist(null);
    this.setLearningRate(Double.NaN);
    this.setBiasLearningRate(Double.NaN);
    this.setLearningRateSchedule(null);
    this.setMomentumSchedule(null);
    this.setL1(Double.NaN);
    this.setL2(Double.NaN);
    this.setRho(Double.NaN);
    this.setEpsilon(Double.NaN);
    this.setRmsDecay(Double.NaN);
    this.setAdamMeanDecay(Double.NaN);
    this.setAdamVarDecay(Double.NaN);
    this.setGradientNormalization(GradientNormalization.None);
    this.setGradientNormalizationThreshold(1.0);
}
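// For context, a minimal sketch of how this reset is exercised through the transfer
// learning API of the same DL4J era as these fragments. The pretrained network and
// the hyperparameter values here are assumptions for illustration only.
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.transferlearning.FineTuneConfiguration;
import org.deeplearning4j.nn.transferlearning.TransferLearning;

static MultiLayerNetwork fineTune(MultiLayerNetwork pretrained) {
    // Only the updater and learning rate are specified here; each layer's other
    // learning-related settings are reset as above, so the NaN/null sentinels
    // resolve against this configuration (or the updater defaults)
    FineTuneConfiguration fineTuneConf = new FineTuneConfiguration.Builder()
            .updater(Updater.NESTEROVS)
            .learningRate(0.01)
            .build();

    return new TransferLearning.Builder(pretrained)
            .fineTuneConfiguration(fineTuneConf)
            .build();
}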
// Apply the fine-tune values that were explicitly specified
bl.setIUpdater(iUpdater);
if (momentum != null)
    bl.setMomentum(momentum);
if (momentumSchedule != null)
    bl.setMomentumSchedule(momentumSchedule);
if (epsilon != null)
    bl.setEpsilon(epsilon);

case NESTEROVS:
    // Clear any layer-level values the fine-tune configuration does not override
    if (momentum == null)
        bl.setMomentum(Double.NaN);
    if (momentumSchedule == null)
        bl.setMomentumSchedule(null);
    break;
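// Note the design choice common to all of these fragments: Double.NaN (for primitive
// fields) and null (for object fields) act as "unset" sentinels, letting the transfer
// learning code distinguish a value explicitly set on a layer from one that should be
// inherited from the fine-tune configuration or fall back to the updater's default.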