/**
 * Returns a copy of the supplied configuration with this object's {@code layer}
 * installed on it. The argument itself is not modified.
 *
 * @param conf the configuration to copy
 * @return a clone of {@code conf} carrying this layer
 */
public NeuralNetConfiguration getInnerConf(NeuralNetConfiguration conf) {
    NeuralNetConfiguration copy = conf.clone();
    copy.setLayer(layer);
    return copy;
}
/**
 * Creates an independent copy of this layer by deep-cloning its configuration
 * and wrapping it in a fresh {@link ActivationLayer}.
 */
@Override
public Layer clone() {
    NeuralNetConfiguration confCopy = conf.clone();
    return new ActivationLayer(confCopy);
}
/**
 * Creates an independent copy of this layer by deep-cloning its configuration
 * and wrapping it in a fresh {@link SubsamplingLayer}.
 */
@Override
public Layer clone() {
    NeuralNetConfiguration confCopy = conf.clone();
    return new SubsamplingLayer(confCopy);
}
public MultiLayerNetwork(MultiLayerConfiguration conf) { this.layerWiseConfigurations = conf; this.defaultConfiguration = conf.getConf(0).clone(); }
/**
 * Creates an independent copy of this layer by deep-cloning its configuration
 * and wrapping it in a fresh {@link LocalResponseNormalization}.
 */
@Override
public Layer clone() {
    NeuralNetConfiguration confCopy = conf.clone();
    return new LocalResponseNormalization(confCopy);
}
/**
 * Creates an independent copy of this layer by deep-cloning its configuration
 * and wrapping it in a fresh {@link ZeroPaddingLayer}.
 */
@Override
public Layer clone() {
    NeuralNetConfiguration confCopy = conf.clone();
    return new ZeroPaddingLayer(confCopy);
}
/**
 * Deep-copies this configuration: vertices, vertex-input lists, network
 * input/output name lists, and the default configuration are all cloned so the
 * returned object shares no mutable state with this one. Scalar settings
 * (pretrain/backprop flags, TBPTT lengths, workspace and cache modes) are
 * copied by value.
 *
 * <p>Fix: removed a dead store — {@code conf.networkInputs} was assigned a new
 * empty list and then immediately overwritten with the real copy.
 *
 * @return an independent deep copy of this configuration
 */
@Override
public ComputationGraphConfiguration clone() {
    ComputationGraphConfiguration conf = new ComputationGraphConfiguration();

    // Deep-copy the vertex map, preserving insertion order
    conf.vertices = new LinkedHashMap<>();
    for (Map.Entry<String, GraphVertex> entry : this.vertices.entrySet()) {
        conf.vertices.put(entry.getKey(), entry.getValue().clone());
    }

    // Deep-copy each vertex's list of input names, preserving insertion order
    conf.vertexInputs = new LinkedHashMap<>();
    for (Map.Entry<String, List<String>> entry : this.vertexInputs.entrySet()) {
        conf.vertexInputs.put(entry.getKey(), new ArrayList<>(entry.getValue()));
    }

    // Copy network-level input/output name lists
    conf.networkInputs = new ArrayList<>(this.networkInputs);
    conf.networkOutputs = new ArrayList<>(this.networkOutputs);

    // Value-copied training settings
    conf.pretrain = pretrain;
    conf.backprop = backprop;
    conf.backpropType = backpropType;
    conf.tbpttFwdLength = tbpttFwdLength;
    conf.tbpttBackLength = tbpttBackLength;
    conf.defaultConfiguration = defaultConfiguration.clone();
    conf.trainingWorkspaceMode = trainingWorkspaceMode;
    conf.inferenceWorkspaceMode = inferenceWorkspaceMode;
    conf.cacheMode = this.cacheMode;
    // Keep the cloned default configuration's cache mode in sync with ours
    conf.defaultConfiguration.cacheMode = this.cacheMode;
    return conf;
}
/**
 * Populates {@code editedConfs}: each layer configuration from the original
 * network is cloned, and — when a fine-tune configuration is present — its
 * overrides are applied to the clone before it is added.
 */
private void fineTuneConfigurationBuild() {
    int layerCount = origConf.getConfs().size();
    for (int i = 0; i < layerCount; i++) {
        NeuralNetConfiguration clonedConf = origConf.getConf(i).clone();
        if (finetuneConfiguration != null) {
            // Apply overrides (e.g. a lower learning rate) to the clone only
            finetuneConfiguration.applyToNeuralNetConfiguration(clonedConf);
        }
        editedConfs.add(clonedConf);
    }
}
/**
 * Deep-copies this configuration. The shallow copy produced by
 * {@code super.clone()} is repaired by replacing the per-layer configuration
 * list and the input pre-processor map with element-wise clones; workspace and
 * cache modes are carried over explicitly.
 *
 * @return an independent deep copy of this configuration
 * @throws RuntimeException if {@code super.clone()} is unsupported (should not
 *         happen for a {@code Cloneable} class)
 */
@Override
public MultiLayerConfiguration clone() {
    try {
        MultiLayerConfiguration copy = (MultiLayerConfiguration) super.clone();

        // Replace the shared layer-conf list with a list of clones
        if (copy.confs != null) {
            List<NeuralNetConfiguration> clonedConfs = new ArrayList<>();
            for (NeuralNetConfiguration layerConf : copy.confs) {
                clonedConfs.add(layerConf.clone());
            }
            copy.confs = clonedConfs;
        }

        // Replace the shared pre-processor map with a map of clones
        if (copy.inputPreProcessors != null) {
            Map<Integer, InputPreProcessor> clonedProcessors = new HashMap<>();
            for (Map.Entry<Integer, InputPreProcessor> e : copy.inputPreProcessors.entrySet()) {
                clonedProcessors.put(e.getKey(), e.getValue().clone());
            }
            copy.inputPreProcessors = clonedProcessors;
        }

        copy.inferenceWorkspaceMode = this.inferenceWorkspaceMode;
        copy.trainingWorkspaceMode = this.trainingWorkspaceMode;
        copy.cacheMode = this.cacheMode;
        return copy;
    } catch (CloneNotSupportedException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Creates an independent copy of this vertex: the layer configuration is
 * always cloned, and the pre-processor is cloned when present (a {@code null}
 * pre-processor stays {@code null}).
 */
@Override
public GraphVertex clone() {
    if (preProcessor != null) {
        return new LayerVertex(layerConf.clone(), preProcessor.clone());
    }
    return new LayerVertex(layerConf.clone(), null);
}
/**
 * Applies this fine-tune configuration to {@code nnc} (mutating it), then
 * returns a fresh configuration rebuilt from a clone of the updated one via
 * the builder.
 *
 * @param nnc the configuration to apply overrides to; modified in place
 * @return a rebuilt configuration reflecting the applied overrides
 */
public NeuralNetConfiguration appliedNeuralNetConfiguration(NeuralNetConfiguration nnc) {
    applyToNeuralNetConfiguration(nnc);
    NeuralNetConfiguration rebuilt = new NeuralNetConfiguration.Builder(nnc.clone()).build();
    return rebuilt;
}
/**
 * Set parameters to selectively override existing learning parameters.
 * Usage e.g. specify a lower learning rate; the overrides are applied to every
 * {@code LayerVertex} in the graph.
 *
 * @param fineTuneConfiguration the overrides to apply to each layer's configuration
 * @return this builder, for chaining
 */
public GraphBuilder fineTuneConfiguration(FineTuneConfiguration fineTuneConfiguration) {
    this.fineTuneConfiguration = fineTuneConfiguration;
    this.editedConfigBuilder = new ComputationGraphConfiguration.GraphBuilder(origConfig,
            fineTuneConfiguration.appliedNeuralNetConfigurationBuilder());
    Map<String, GraphVertex> vertices = this.editedConfigBuilder.getVertices();
    for (Map.Entry<String, GraphVertex> gv : vertices.entrySet()) {
        if (gv.getValue() instanceof LayerVertex) {
            LayerVertex lv = (LayerVertex) gv.getValue();
            // Clone before applying overrides so the original vertex conf is untouched
            NeuralNetConfiguration nnc = lv.getLayerConf().clone();
            fineTuneConfiguration.applyToNeuralNetConfiguration(nnc);
            // NOTE(review): put() during entrySet() iteration is only safe because the
            // key already exists (value replacement, no structural modification)
            vertices.put(gv.getKey(), new LayerVertex(nnc, lv.getPreProcessor()));
            // Restore the layer name on the freshly cloned configuration
            nnc.getLayer().setLayerName(gv.getKey());
        }
    }
    return this;
}
// NOTE(review): fragment — enclosing method not visible in this view. Visible code
// null-safely clones network.defaultConfiguration and begins a null check on
// network.input; presumably part of a network deep-copy routine — confirm in full file.
(network.defaultConfiguration != null ? network.defaultConfiguration.clone() : null); if (network.input != null)
// NOTE(review): fragment — enclosing loop/method not visible. Visible code clones a
// layer configuration, resets variables on the edited model's i-th configuration, and
// installs the clone on layer i; verify ordering against the surrounding loop.
NeuralNetConfiguration layerNNC = origNNC.clone(); editedModel.getLayerWiseConfigurations().getConf(i).resetVariables(); layers[i].setConf(layerNNC);
// NOTE(review): fragment — the try block is truncated in this view. The existing
// "assume a deep clone here" comment is an unverified assumption about conf.clone();
// confirm NeuralNetConfiguration.clone() is deep before relying on isolation.
Layer layer; try { NeuralNetConfiguration clone = conf.clone(); // assume a deep clone here
// NOTE(review): fragment — enclosing method not visible. Visible code swaps in a clone
// of the vertex's layer configuration and then sets a new layer on that clone,
// leaving the original configuration untouched.
NeuralNetConfiguration newNNC = currLayerVertex.getLayerConf().clone(); currLayerVertex.setLayerConf(newNNC); currLayerVertex.getLayerConf().setLayer(newLayerConf);