/**
 * Set parameters to selectively override existing learning parameters.
 * Usage eg. specify a lower learning rate. This will get applied to all layers.
 *
 * @param fineTuneConfiguration the configuration whose set fields override each layer's current values
 * @return this GraphBuilder, for call chaining
 */
public GraphBuilder fineTuneConfiguration(FineTuneConfiguration fineTuneConfiguration) {
    this.fineTuneConfiguration = fineTuneConfiguration;
    this.editedConfigBuilder = new ComputationGraphConfiguration.GraphBuilder(origConfig,
                    fineTuneConfiguration.appliedNeuralNetConfigurationBuilder());

    Map<String, GraphVertex> vertices = this.editedConfigBuilder.getVertices();
    for (Map.Entry<String, GraphVertex> gv : vertices.entrySet()) {
        if (gv.getValue() instanceof LayerVertex) {
            LayerVertex lv = (LayerVertex) gv.getValue();
            // Clone so the original vertex configuration is never mutated
            NeuralNetConfiguration nnc = lv.getLayerConf().clone();
            fineTuneConfiguration.applyToNeuralNetConfiguration(nnc);
            // Carry the vertex name over to the cloned configuration before swapping it in
            nnc.getLayer().setLayerName(gv.getKey());
            // Replace through the entry view instead of Map.put while iterating:
            // setValue is explicitly safe during iteration, whereas put only happens to be
            // safe here because the key already exists (no structural modification)
            gv.setValue(new LayerVertex(nnc, lv.getPreProcessor()));
        }
    }
    return this;
}
/**
 * Populates {@code editedConfs} from the original configuration, applying the
 * fine-tune overrides (when set) to a clone of each layer's configuration.
 * The original configurations are never modified.
 */
private void fineTuneConfigurationBuild() {
    int numLayers = origConf.getConfs().size();
    for (int idx = 0; idx < numLayers; idx++) {
        // Always work on a clone so origConf stays untouched
        NeuralNetConfiguration cloned = origConf.getConf(idx).clone();
        if (finetuneConfiguration != null) {
            finetuneConfiguration.applyToNeuralNetConfiguration(cloned);
        }
        editedConfs.add(cloned);
    }
}
private MultiLayerConfiguration constructConf() { //use the editedConfs list to make a new config List<NeuralNetConfiguration> allConfs = new ArrayList<>(); allConfs.addAll(editedConfs); allConfs.addAll(appendConfs); //Set default layer names, if not set - as per NeuralNetConfiguration.ListBuilder.build() for (int i = 0; i < allConfs.size(); i++) { if (allConfs.get(i).getLayer().getLayerName() == null) { allConfs.get(i).getLayer().setLayerName("layer" + i); } } MultiLayerConfiguration conf = new MultiLayerConfiguration.Builder().inputPreProcessors(inputPreProcessors) .setInputType(this.inputType).confs(allConfs).build(); if (finetuneConfiguration != null) { finetuneConfiguration.applyToMultiLayerConfiguration(conf); } return conf; } }
// NOTE(review): loose statement — its enclosing method is not visible in this chunk.
// Presumably builds a layer's NeuralNetConfiguration with the fine-tune overrides
// applied via the shared builder; confirm against the full file before relying on this.
finetuneConfiguration.appliedNeuralNetConfigurationBuilder().layer(layer).build();
/**
 * Applies this fine-tune configuration to the given NeuralNetConfiguration
 * (mutating it in place), then returns a freshly built copy of the result.
 *
 * @param nnc the configuration to apply overrides to; modified by this call
 * @return a new NeuralNetConfiguration rebuilt from a clone of the updated input
 */
public NeuralNetConfiguration appliedNeuralNetConfiguration(NeuralNetConfiguration nnc) {
    applyToNeuralNetConfiguration(nnc);
    // Rebuild from a clone so the returned instance is independent of the caller's copy
    NeuralNetConfiguration rebuilt = new NeuralNetConfiguration.Builder(nnc.clone()).build();
    return rebuilt;
}