/**
 * Decays every per-parameter learning rate in this configuration by the
 * configured LR-policy decay rate. {@code Nd4j.EPS_THRESHOLD} is added to the
 * decay rate so the resulting rates stay strictly positive even when the
 * configured decay rate is 0.
 */
@Override
public void applyLearningRateScoreDecay() {
    // Hoisted: decay factor is constant for the whole sweep.
    final double decayFactor = conf.getLrPolicyDecayRate() + Nd4j.EPS_THRESHOLD;
    for (Map.Entry<String, Double> entry : conf.getLearningRateByParam().entrySet()) {
        conf.setLearningRateByParam(entry.getKey(), entry.getValue() * decayFactor);
    }
}
}
/**
 * Halves every per-parameter learning rate on every layer of the given graph.
 * {@code Nd4j.EPS_THRESHOLD} is added to the 0.5 multiplier so rates remain
 * strictly positive.
 *
 * @param computationGraph graph whose layers' learning rates are decayed in place
 */
private void decreaseLearningRate(ComputationGraph computationGraph) {
    final double factor = 0.5 + Nd4j.EPS_THRESHOLD;
    for (Layer layer : computationGraph.getLayers()) {
        final Map<String, Double> lrByParam = layer.conf().getLearningRateByParam();
        if (lrByParam.isEmpty()) {
            continue; // layer has no learnable parameters
        }
        for (Map.Entry<String, Double> entry : lrByParam.entrySet()) {
            layer.conf().setLearningRateByParam(entry.getKey(), entry.getValue() * factor);
        }
    }
}
/**
 * Decays each layer's per-parameter learning rates by that layer's own
 * configured LR-policy decay rate (plus {@code Nd4j.EPS_THRESHOLD} to keep the
 * rates strictly positive). Layers with no learnable parameters are skipped.
 */
@Override
public void applyLearningRateScoreDecay() {
    for (Layer layer : layers) {
        final Map<String, Double> lrByParam = layer.conf().getLearningRateByParam();
        if (lrByParam.isEmpty()) {
            continue;
        }
        // Decay rate is per-layer configuration, constant within the layer.
        final double decayFactor = layer.conf().getLrPolicyDecayRate() + Nd4j.EPS_THRESHOLD;
        for (Map.Entry<String, Double> entry : lrByParam.entrySet()) {
            layer.conf().setLearningRateByParam(entry.getKey(), entry.getValue() * decayFactor);
        }
    }
}
@Override public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf, Collection<IterationListener> iterationListeners, int layerIndex, INDArray layerParamsView, boolean initializeParams) { //Need to be able to instantiate a layer, from a config - for JSON -> net type situations org.deeplearning4j.nn.api.Layer underlying = layer.instantiate(getInnerConf(conf), iterationListeners, layerIndex, layerParamsView, initializeParams); NeuralNetConfiguration nncUnderlying = underlying.conf(); if (nncUnderlying.variables() != null) { List<String> vars = nncUnderlying.variables(true); nncUnderlying.clearVariables(); conf.clearVariables(); for (String s : vars) { conf.variables(false).add(s); conf.getL1ByParam().put(s, 0.0); conf.getL2ByParam().put(s, 0.0); conf.getLearningRateByParam().put(s, 0.0); nncUnderlying.variables(false).add(s); nncUnderlying.getL1ByParam().put(s, 0.0); nncUnderlying.getL2ByParam().put(s, 0.0); nncUnderlying.getLearningRateByParam().put(s, 0.0); } } return new org.deeplearning4j.nn.layers.FrozenLayer(underlying); }
// NOTE(review): mid-method fragment (enclosing method and loop close are outside this view).
// Assumes currentLayer's config holds a FeedForwardLayer — the cast is unguarded and would
// throw ClassCastException for other layer types; TODO confirm callers filter layer types.
// Parameter names are taken from the learning-rate-by-param map's key set; presumably this
// builds a per-parameter summary (name + shape string) — verify against the full method.
in = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNIn()); out = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNOut()); Set<String> paraNames = currentLayer.conf().getLearningRateByParam().keySet(); for (String aP : paraNames) { String paramS = ArrayUtils.toString(currentLayer.paramTable().get(aP).shape());
// NOTE(review): duplicate of the fragment above, presumably from a second file/class —
// same unguarded FeedForwardLayer cast and same param-name iteration via the
// learning-rate-by-param key set. Consider extracting a shared helper once both
// enclosing methods are in view.
in = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNIn()); out = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNOut()); Set<String> paraNames = currentLayer.conf().getLearningRateByParam().keySet(); for (String aP : paraNames) { String paramS = ArrayUtils.toString(currentLayer.paramTable().get(aP).shape());
// Look up the per-parameter learning rate for the current variable (unboxes the stored
// Double; NOTE(review): would NPE if 'variable' has no entry — TODO confirm callers
// only pass registered variable names).
double lr = conf.getLearningRateByParam(variable);
// Zero out regularization and learning rate for variable 's' on the original config,
// and zero the learning rate on the layer config. NOTE(review): mid-method fragment —
// layerNNC's L1/L2 are not zeroed here; can't tell from this excerpt whether that is
// intentional or handled elsewhere. TODO confirm against the full method.
origNNC.getL1ByParam().put(s, 0.0); origNNC.getL2ByParam().put(s, 0.0); origNNC.getLearningRateByParam().put(s, 0.0); layerNNC.getLearningRateByParam().put(s, 0.0);
// NOTE(review): this line appears to be a concatenation of several distinct snippets
// (layerLrs and backpropParams are each declared twice in what reads as one scope, which
// would not compile) — likely search-result residue rather than one real method body.
// Common intent of the pieces: walk a MultiLayerNetwork's layers, read each layer's
// per-parameter learning-rate map, and aggregate into 'lrs' keyed by backprop param names.
// TODO locate the original method(s) before editing.
for (Layer l : ((MultiLayerNetwork) model).getLayers()) { NeuralNetConfiguration conf = l.conf(); Map<String, Double> layerLrs = conf.getLearningRateByParam(); Set<String> backpropParams = l.paramTable(true).keySet(); for (Map.Entry<String, Double> entry : layerLrs.entrySet()) { Map<String, Double> layerLrs = conf.getLearningRateByParam(); String layerName = conf.getLayer().getLayerName(); Set<String> backpropParams = l.paramTable(true).keySet(); Map<String, Double> map = l.conf().getLearningRateByParam(); lrs.putAll(map);
// Zero L1, L2 and learning rate for variable 's' on the new configuration —
// same zeroing pattern used when freezing layers elsewhere in this codebase.
newNNC.getL1ByParam().put(s, 0.0); newNNC.getL2ByParam().put(s, 0.0); newNNC.getLearningRateByParam().put(s, 0.0);
// NOTE(review): mid-method fragment (the 'if' is closed outside this view). Compares two
// per-parameter learning rates with primitive != — exact floating-point comparison; that is
// presumably intentional for config-equality checks (rates are copied, not recomputed),
// but TODO confirm no arithmetic ever produces these values.
double lr1 = layer1.conf().getLearningRateByParam(param1); double lr2 = layer2.conf().getLearningRateByParam(param2); if (lr1 != lr2) { return false;