/**
 * Serializes the given model's network configuration (architecture only, not the
 * trained parameters) to JSON and writes it to {@code modelPath} on the supplied
 * Hadoop file system.
 *
 * @param fs    the Hadoop file system to write to
 * @param model the model whose configuration is saved; must be a
 *              {@link MultiLayerNetwork} or a {@link ComputationGraph}
 * @throws IllegalArgumentException if the model is not one of the supported types
 * @throws Exception on any I/O failure
 */
public static void saveModel(FileSystem fs, Model model) throws Exception {
    String json = null;
    if (model instanceof MultiLayerNetwork) {
        json = ((MultiLayerNetwork) model).getLayerWiseConfigurations().toJson();
    } else if (model instanceof ComputationGraph) {
        json = ((ComputationGraph) model).getConfiguration().toJson();
    }
    // Fail loudly on unsupported model types instead of the NPE the original
    // would hit on json.getBytes().
    if (json == null) {
        throw new IllegalArgumentException("Unsupported model type: "
                        + (model == null ? "null" : model.getClass().getName()));
    }
    // Explicit charset: the platform default is not portable across JVMs.
    byte[] byts = json.getBytes("UTF-8");
    FSDataOutputStream out = fs.create(new Path(modelPath));
    try {
        out.write(byts);
        out.hsync();
    } finally {
        // The original never closed the stream; close it even on failure.
        out.close();
    }
    // NOTE(review): closing the (possibly shared, cached) FileSystem instance is
    // preserved from the original behavior — confirm no other code holds it.
    fs.close();
}
}
/**
 * Initializes this worker's local network from job configuration parameters.
 *
 * Reads batch size, class count and feature count (with defaults), then
 * deserializes the multi-layer configuration JSON and builds the network.
 */
@Override
public void setup(Configuration conf) {
    String networkJson = conf.get(MULTI_LAYER_CONF);
    log.info("Worker-Conf: " + networkJson);

    // Hyperparameters with sensible fallbacks when the keys are absent.
    this.batchSize = conf.getInt("org.deeplearning4j.batchSize", 10);
    this.numberClasses = conf.getInt("org.deeplearning4j.numberClasses", 2);
    this.numberFeatures = conf.getInt("org.deeplearning4j.features", 5);
    log.info("Classes: " + this.numberClasses + ", Features: " + this.numberFeatures);

    multiLayerNetwork = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(networkJson));
}
// Clone the original network's configuration via a JSON round trip so the
// replica shares no mutable configuration state with the original model.
MultiLayerConfiguration conf = MultiLayerConfiguration.fromJson(
                ((MultiLayerNetwork) originalModel).getLayerWiseConfigurations().toJson());
conf.setTrainingWorkspaceMode(workspaceMode);
// Fix: the original had a dangling ".setTrainingWorkspaceMode(workspaceMode);"
// statement here (a syntax error); the mode is already set on conf above.
this.replicatedModel = new MultiLayerNetwork(conf);
// Attempt to parse the input as multi-layer-network JSON first.
return MultiLayerConfiguration.fromJson(input);
} catch (Exception e) {
    // JSON parse failed; log and fall through to the next format.
    log.warn("Tried multi layer config from json", e);
    log.warn("Tried computation graph from json");
    try {
        // Fall back to YAML parsing of the same input.
        return MultiLayerConfiguration.fromYaml(input);
    } catch (Exception e4) {
        // NOTE(review): fragment ends here; further fallbacks presumably follow
        // outside this view — confirm against the full method.
        log.warn("Tried multi layer configuration from yaml");
/**
 * Get the number of layers in the network.
 *
 * @return the number of per-layer configurations held by the
 *         layer-wise configuration
 */
public int getnLayers() {
    final int layerCount = layerWiseConfigurations.getConfs().size();
    return layerCount;
}
/**
 * Populates {@code editedConfs} with a clone of each original layer
 * configuration, applying the fine-tune overrides when one is set.
 */
private void fineTuneConfigurationBuild() {
    final int numLayers = origConf.getConfs().size();
    for (int layerIdx = 0; layerIdx < numLayers; layerIdx++) {
        // Always clone so the original configuration is never mutated.
        NeuralNetConfiguration layerConf = origConf.getConf(layerIdx).clone();
        if (finetuneConfiguration != null) {
            // Overlay the fine-tuning settings onto the cloned layer config.
            finetuneConfiguration.applyToNeuralNetConfiguration(layerConf);
        }
        editedConfs.add(layerConf);
    }
}
// Collect summary metadata from a MultiLayerNetwork candidate:
// parameter count, layer count, config JSON and total update count.
numParams = m.numParams();
numLayers = m.getnLayers();
modelConfigJson = m.getLayerWiseConfigurations().toJson();
totalNumUpdates = m.getLayerWiseConfigurations().getIterationCount();
} else if (candidate instanceof ComputationGraph) {
    // NOTE(review): fragment — the ComputationGraph branch continues outside
    // this view; presumably it mirrors the fields above. Confirm.
    ComputationGraph cg = (ComputationGraph) candidate;
// NOTE(review): this fragment appears garbled by extraction — `conf` is
// declared twice and braces are unbalanced; it is likely two separate loops
// (parameter counting, then layer instantiation) collapsed together.
// Verify against the full method before relying on these comments.
layerWiseConfigurations.getTrainingWorkspaceMode(),
layerWiseConfigurations.getInferenceWorkspaceMode());
// When caching on host, restrict cache-workspace mirroring to host memory.
if (layerWiseConfigurations.getCacheMode() == CacheMode.HOST) {
    workspaceConfigurationCache.setPolicyMirroring(MirroringPolicy.HOST_ONLY);
// First pass: count parameters per layer to size the flat parameter view.
int[] nParamsPerLayer = new int[nLayers];
for (int i = 0; i < nLayers; i++) {
    NeuralNetConfiguration conf = layerWiseConfigurations.getConf(i);
    nParamsPerLayer[i] = conf.getLayer().initializer().numParams(conf);
    paramLength += nParamsPerLayer[i];
// Second pass (presumably a separate loop): instantiate each layer over its
// slice of the parameter view and index it by name.
NeuralNetConfiguration conf = layerWiseConfigurations.getConf(i);
layers[i] = conf.getLayer().instantiate(conf, listeners, i, paramsView, initializeParams);
layerMap.put(conf.getLayer().getLayerName(), layers[i]);
/**
 * Constructs a network from the given multi-layer configuration.
 *
 * @param conf the layer-wise network configuration
 */
public MultiLayerNetwork(MultiLayerConfiguration conf) {
    // The first layer's configuration (cloned) serves as the network default.
    this.defaultConfiguration = conf.getConf(0).clone();
    this.layerWiseConfigurations = conf;
}
/**
 * Calculate the output of the network, with masking arrays. The masking arrays are used in
 * situations such as one-to-many and many-to-one recurrent neural network (RNN) designs, as
 * well as for supporting time series of varying lengths within the same minibatch.
 *
 * @param input        network input features
 * @param train        whether to run in training mode (affects e.g. dropout)
 * @param featuresMask feature mask array (may be null)
 * @param labelsMask   labels mask array (may be null)
 * @return the network output, detached from any workspace
 */
public INDArray output(INDArray input, boolean train, INDArray featuresMask, INDArray labelsMask) {
    // Temporarily switch the training workspace mode to the inference mode for
    // the duration of this forward pass.
    WorkspaceMode cMode = layerWiseConfigurations.getTrainingWorkspaceMode();
    layerWiseConfigurations.setTrainingWorkspaceMode(layerWiseConfigurations.getInferenceWorkspaceMode());
    MemoryWorkspace workspace = layerWiseConfigurations.getTrainingWorkspaceMode() == WorkspaceMode.NONE
                    ? new DummyWorkspace()
                    : Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(
                                    workspaceConfigurationExternal, workspaceExternal);
    try (MemoryWorkspace wsE = workspace.notifyScopeEntered()) {
        // Detach so the result outlives the workspace scope.
        return silentOutput(input, train, featuresMask, labelsMask).detach();
    } finally {
        // Fix: restore in finally — the original only restored on success, so an
        // exception in silentOutput left the net stuck in inference workspace mode.
        layerWiseConfigurations.setTrainingWorkspaceMode(cMode);
    }
}
// Assemble the final configuration object from the builder's fields.
MultiLayerConfiguration conf = new MultiLayerConfiguration();
conf.confs = this.confs;
conf.pretrain = pretrain;
conf.cacheMode = cacheMode;
// NOTE(review): seeding the GLOBAL Nd4j RNG here is a side effect of building
// a configuration — confirm callers expect build() to reseed the RNG.
Nd4j.getRandom().setSeed(conf.getConf(0).getSeed());
return conf;
// Equality requires identical parameters, configuration, and updater state.
MultiLayerNetwork network = (MultiLayerNetwork) obj;
boolean paramsEquals = network.params().equals(params());
boolean confEquals = getLayerWiseConfigurations().equals(network.getLayerWiseConfigurations());
// NOTE(review): if getUpdater() can return null for an uninitialized net this
// line would NPE — confirm the enclosing method guards against that.
boolean updaterEquals = getUpdater().equals(network.getUpdater());
return paramsEquals && confEquals && updaterEquals;
/**
 * Parses a neural-network configuration from a JSON file path.
 *
 * If the file name contains {@code "_multi"} the contents are parsed as a
 * {@link MultiLayerConfiguration}; otherwise as a plain
 * {@code NeuralNetConfiguration}.
 *
 * @param value path to the JSON configuration file
 * @return the parsed configuration object
 * @throws Exception if the file cannot be read or parsed
 */
@Override
@SuppressWarnings("unchecked") // caller-chosen E; cast is inherent to this converter API
public <E> E value(String value) throws Exception {
    // Primitive boolean: no need for the boxed Boolean the original used.
    boolean isMultiLayer = value.contains("_multi");
    // Explicit charset — the no-charset overload reads with the platform default.
    String json = FileUtils.readFileToString(new File(value), "UTF-8");
    if (isMultiLayer) {
        return (E) MultiLayerConfiguration.fromJson(json);
    } else {
        return (E) NeuralNetConfiguration.fromJson(json);
    }
}
// Clone the original network's configuration via a JSON round trip so the
// replica shares no mutable configuration state with the original model.
MultiLayerConfiguration conf = MultiLayerConfiguration.fromJson(
                ((MultiLayerNetwork) originalModel).getLayerWiseConfigurations().toJson());
conf.setTrainingWorkspaceMode(workspaceMode);
// Fix: the original had a dangling ".setTrainingWorkspaceMode(workspaceMode);"
// statement here (a syntax error); the mode is already set on conf above.
this.replicatedModel = new MultiLayerNetwork(conf);
/**
 * Logs every layer's configuration at INFO level, as a single message of the
 * form {@code " Layer <i> conf <conf>"} repeated per layer.
 */
public void printConfiguration() {
    StringBuilder sb = new StringBuilder();
    int count = 0;
    for (NeuralNetConfiguration conf : getLayerWiseConfigurations().getConfs()) {
        // Chained appends: the original concatenated with '+' inside append(),
        // building a throwaway String and defeating the StringBuilder.
        sb.append(" Layer ").append(count++).append(" conf ").append(conf);
    }
    log.info(sb.toString());
}