private void fineTuneConfigurationBuild() {
    for (int i = 0; i < origConf.getConfs().size(); i++) {
        NeuralNetConfiguration layerConf;
        if (finetuneConfiguration != null) {
            NeuralNetConfiguration nnc = origConf.getConf(i).clone();
            finetuneConfiguration.applyToNeuralNetConfiguration(nnc);
            layerConf = nnc;
        } else {
            layerConf = origConf.getConf(i).clone();
        }
        editedConfs.add(layerConf);
    }
}
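// A minimal sketch of the transfer-learning entry point that drives the per-layer
// fine-tuning above. The network shape and the Adam learning rate are illustrative
// assumptions, not taken from the surrounding code.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.transferlearning.FineTuneConfiguration;
import org.deeplearning4j.nn.transferlearning.TransferLearning;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class FineTuneSketch {
    public static void main(String[] args) {
        //Stand-in for a pretrained model (normally restored from disk)
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(42)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(10).nOut(5).activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(5).nOut(3).activation(Activation.SOFTMAX).build())
                .build();
        MultiLayerNetwork pretrained = new MultiLayerNetwork(conf);
        pretrained.init();

        FineTuneConfiguration ftc = new FineTuneConfiguration.Builder()
                .updater(new Adam(1e-4))    //Illustrative value
                .build();

        //Internally this clones each layer's NeuralNetConfiguration and applies
        //the fine-tune settings to the clone, as in fineTuneConfigurationBuild()
        MultiLayerNetwork edited = new TransferLearning.Builder(pretrained)
                .fineTuneConfiguration(ftc)
                .build();
        System.out.println(edited.summary());
    }
}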
public MultiLayerNetwork(MultiLayerConfiguration conf) {
    this.layerWiseConfigurations = conf;
    this.defaultConfiguration = conf.getConf(0).clone();
}
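// A hedged usage sketch for this constructor: build a MultiLayerConfiguration with
// the list builder, then construct and initialize the network. Layer sizes here
// are arbitrary assumptions.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ConstructorSketch {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(123)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .nIn(3).nOut(1).activation(Activation.IDENTITY).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();    //Allocates parameters; the constructor alone does not

        System.out.println(conf.getConfs().size());    //2: one NeuralNetConfiguration per layer
        System.out.println(net.getnLayers());          //2
    }
}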
@Override
public MultiLayerConfiguration deserialize(JsonParser jp, DeserializationContext ctxt)
        throws IOException, JsonProcessingException {
    MultiLayerConfiguration conf = (MultiLayerConfiguration) defaultDeserializer.deserialize(jp, ctxt);

    //Updater configuration changed after the 0.8.0 release.
    //Previously: enumerations and fields. Now: classes.
    //Here, we manually create the appropriate IUpdater instances, if the IUpdater field is empty.
    Layer[] layers = new Layer[conf.getConfs().size()];
    for (int i = 0; i < layers.length; i++) {
        layers[i] = conf.getConf(i).getLayer();
    }
    handleUpdaterBackwardCompatibility(layers);

    return conf;
}
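// A hedged sketch of the JSON round-trip that exercises the custom deserializer
// above. The single-layer configuration is an arbitrary assumption.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class JsonRoundTripSketch {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .nIn(3).nOut(1).activation(Activation.IDENTITY).build())
                .build();

        String json = conf.toJson();
        //fromJson(...) goes through the deserializer above, including the updater
        //backward-compatibility handling for configurations saved with 0.8.0 or earlier
        MultiLayerConfiguration restored = MultiLayerConfiguration.fromJson(json);
        System.out.println(restored.equals(conf)); //expected: true
    }
}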
int confIdx = layerIdx - 1;    //-1 because of input
if (confIdx >= 0) {
    nnc = conf.getConf(confIdx);
    layer = nnc.getLayer();
} else {
try {
    MultiLayerConfiguration conf = MultiLayerConfiguration.fromJson(
            FileUtils.readFileToString(new File(modelPath)));
    FeedForwardLayer outputLayer =
            (FeedForwardLayer) conf.getConf(conf.getConfs().size() - 1).getLayer();
conf.cacheMode = cacheMode;
Nd4j.getRandom().setSeed(conf.getConf(0).getSeed());
return conf;
NeuralNetConfiguration origNNC = editedModel.getLayerWiseConfigurations().getConf(i);
NeuralNetConfiguration layerNNC = origNNC.clone();
editedModel.getLayerWiseConfigurations().getConf(i).resetVariables();
layers[i].setConf(layerNNC);
layers[i] = new FrozenLayer(layers[i]);

Layer origLayerConf = editedModel.getLayerWiseConfigurations().getConf(i).getLayer();
Layer newLayerConf = new org.deeplearning4j.nn.conf.layers.misc.FrozenLayer(origLayerConf);
newLayerConf.setLayerName(origLayerConf.getLayerName());
editedModel.getLayerWiseConfigurations().getConf(i).setLayer(newLayerConf);
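// The per-layer freezing above is, as I read it, what TransferLearning.Builder's
// setFeatureExtractor(...) performs for each layer up to the given index. A hedged
// usage sketch; the layer sizes are assumptions.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.transferlearning.TransferLearning;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class FreezeSketch {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new DenseLayer.Builder().nIn(8).nOut(4).activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(4).nOut(2).activation(Activation.SOFTMAX).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        //Freeze everything up to and including layer 0; each frozen layer's conf
        //is wrapped in org.deeplearning4j.nn.conf.layers.misc.FrozenLayer, as above
        MultiLayerNetwork frozen = new TransferLearning.Builder(net)
                .setFeatureExtractor(0)
                .build();
        System.out.println(frozen.getLayer(0).getClass().getSimpleName()); //FrozenLayer
    }
}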
/**
 * This method: initializes the flattened gradients array (used in backprop) and sets the appropriate subset in all layers.
 * As a general rule, this shouldn't ever need to be called manually when doing training via fit(DataSet) or fit(DataSetIterator)
 */
public void initGradientsView() {
    try (MemoryWorkspace ws = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
        if (layers == null)
            init();

        int nLayers = layers.length;

        //First: Work out total length of params
        int paramLength = 0;
        int[] nParamsPerLayer = new int[nLayers];
        for (int i = 0; i < nLayers; i++) {
            NeuralNetConfiguration conf = layerWiseConfigurations.getConf(i);
            nParamsPerLayer[i] = conf.getLayer().initializer().numParams(conf);
            paramLength += nParamsPerLayer[i];
        }

        flattenedGradients = Nd4j.zeros(new int[] {1, paramLength}, 'f'); //No need to initialize, as each layer will do it each iteration anyway

        int backpropParamsSoFar = 0;
        for (int i = 0; i < layers.length; i++) {
            if (nParamsPerLayer[i] == 0)
                continue; //This layer doesn't have any parameters...
            INDArray thisLayerGradView = flattenedGradients.get(NDArrayIndex.point(0),
                    NDArrayIndex.interval(backpropParamsSoFar, backpropParamsSoFar + nParamsPerLayer[i]));
            layers[i].setBackpropGradientsViewArray(thisLayerGradView);
            backpropParamsSoFar += nParamsPerLayer[i];
        }
    }
}
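// A hedged check of the invariant initGradientsView() maintains: every layer's
// backprop gradient view is a slice of one flat array whose total length equals
// numParams(). The network shape is an arbitrary assumption; the explicit
// initGradientsView() call is only there because fit(...) has not run yet.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class GradientViewSketch {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new DenseLayer.Builder().nIn(10).nOut(5).activation(Activation.TANH).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .nIn(5).nOut(1).activation(Activation.IDENTITY).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.initGradientsView();

        //(10*5 + 5) + (5*1 + 1) = 61 parameters in total
        System.out.println(net.numParams());                      //61
        System.out.println(net.getGradientsViewArray().length()); //61
    }
}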
int[] nParamsPerLayer = new int[nLayers];
for (int i = 0; i < nLayers; i++) {
    NeuralNetConfiguration conf = layerWiseConfigurations.getConf(i);
    nParamsPerLayer[i] = conf.getLayer().initializer().numParams(conf);
    paramLength += nParamsPerLayer[i];
}

//Second pass: instantiate each layer with its parameter view and register it by name
for (int i = 0; i < nLayers; i++) {
    NeuralNetConfiguration conf = layerWiseConfigurations.getConf(i);
    layers[i] = conf.getLayer().instantiate(conf, listeners, i, paramsView, initializeParams);
    layerMap.put(conf.getLayer().getLayerName(), layers[i]);
}
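// The layerMap populated above backs name-based layer lookup. A hedged sketch;
// the layer names "dense0" and "out" are assumptions set via the builder.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class LayerLookupSketch {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new DenseLayer.Builder().name("dense0")
                        .nIn(6).nOut(3).activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .name("out").nIn(3).nOut(2).activation(Activation.SOFTMAX).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        //instantiate(...) registered each layer under its configured name
        System.out.println(net.getLayer("dense0").getClass().getSimpleName()); //DenseLayer
    }
}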