@Override public org.deeplearning4j.nn.graph.vertex.GraphVertex instantiate(ComputationGraph graph, String name, int idx, INDArray paramsView, boolean initializeParams) { //Now, we need to work out if this vertex is an output vertex or not... boolean isOutput = graph.getConfiguration().getNetworkOutputs().contains(name); org.deeplearning4j.nn.api.Layer layer = layerConf.getLayer().instantiate(layerConf, null, idx, paramsView, initializeParams); return new org.deeplearning4j.nn.graph.vertex.impl.LayerVertex(graph, name, idx, layer, preProcessor, isOutput); }
/**
 * Instantiates the wrapped layer from its configuration and returns it wrapped in a
 * {@code FrozenLayer}. After instantiation, every variable of the underlying layer is
 * re-registered on both the outer and the underlying configuration with zero L1, zero L2
 * and a zero learning rate, so the frozen parameters are never regularized or updated.
 *
 * @param conf               outer configuration holding this frozen-layer config
 * @param iterationListeners listeners forwarded to the underlying layer
 * @param layerIndex         index of the layer within the network
 * @param layerParamsView    view of the network parameter array backing this layer
 * @param initializeParams   whether the underlying layer should (re)initialize parameters
 * @return the instantiated underlying layer wrapped in a {@code FrozenLayer}
 */
@Override
public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf, Collection<IterationListener> iterationListeners,
                int layerIndex, INDArray layerParamsView, boolean initializeParams) {
    //Need to be able to instantiate a layer, from a config - for JSON -> net type situations
    org.deeplearning4j.nn.api.Layer underlying = layer.instantiate(getInnerConf(conf), iterationListeners, layerIndex,
                    layerParamsView, initializeParams);
    NeuralNetConfiguration nncUnderlying = underlying.conf();
    if (nncUnderlying.variables() != null) {
        // NOTE(review): variables(true) presumably returns a copy of the variable list —
        // the copy must be taken BEFORE clearVariables() below wipes the originals.
        List<String> vars = nncUnderlying.variables(true);
        // Clear both configs, then rebuild their variable lists from scratch with
        // zeroed-out regularization and learning rates.
        nncUnderlying.clearVariables();
        conf.clearVariables();
        for (String s : vars) {
            // Outer config: re-add the variable with L1 = L2 = learning rate = 0.
            conf.variables(false).add(s);
            conf.getL1ByParam().put(s, 0.0);
            conf.getL2ByParam().put(s, 0.0);
            conf.getLearningRateByParam().put(s, 0.0);
            // Underlying config: mirror the same zeroed settings.
            nncUnderlying.variables(false).add(s);
            nncUnderlying.getL1ByParam().put(s, 0.0);
            nncUnderlying.getL2ByParam().put(s, 0.0);
            nncUnderlying.getLearningRateByParam().put(s, 0.0);
        }
    }
    return new org.deeplearning4j.nn.layers.FrozenLayer(underlying);
}
// Only layers that actually have parameters contribute a parameter view and a config;
// zero-parameter layers are skipped entirely (Nd4j.create(1, 0) would not be a valid view).
if (numParams > 0) {
    params = Nd4j.create(1, numParams);
    // Instantiate a throwaway layer solely to obtain initialized parameters and the
    // resulting configuration to append.
    org.deeplearning4j.nn.api.Layer someLayer = layer.instantiate(layerConf, null, 0, params, true);
    appendParams.add(someLayer.params());
    appendConfs.add(someLayer.conf());
private void nOutReplaceBuild(int layerNum, int nOut, Pair<WeightInit, Distribution> schemedist, Pair<WeightInit, Distribution> schemedistNext) { NeuralNetConfiguration layerConf = editedConfs.get(layerNum); Layer layerImpl = layerConf.getLayer(); //not a clone need to modify nOut in place FeedForwardLayer layerImplF = (FeedForwardLayer) layerImpl; layerImplF.setWeightInit(schemedist.getLeft()); layerImplF.setDist(schemedist.getRight()); layerImplF.setNOut(nOut); int numParams = layerImpl.initializer().numParams(layerConf); INDArray params = Nd4j.create(1, numParams); org.deeplearning4j.nn.api.Layer someLayer = layerImpl.instantiate(layerConf, null, 0, params, true); editedParams.set(layerNum, someLayer.params()); if (layerNum + 1 < editedConfs.size()) { layerConf = editedConfs.get(layerNum + 1); layerImpl = layerConf.getLayer(); //modify in place layerImplF = (FeedForwardLayer) layerImpl; layerImplF.setWeightInit(schemedistNext.getLeft()); layerImplF.setDist(schemedistNext.getRight()); layerImplF.setNIn(nOut); numParams = layerImpl.initializer().numParams(layerConf); if (numParams > 0) { params = Nd4j.create(1, numParams); someLayer = layerImpl.instantiate(layerConf, null, 0, params, true); editedParams.set(layerNum + 1, someLayer.params()); } } }
// Re-instantiate this vertex/layer from the cloned configuration, reusing the existing
// parameter view. NOTE(review): initializeParams is hard-coded to true here, so the
// parameters in paramsView are presumably re-initialized — confirm this is intended.
layer = clone.getLayer().instantiate(clone, iterationListeners, this.index, paramsView, true);
// Instantiate layer i from its configuration; paramsView is the slice of the network-level
// parameter array that backs this layer's parameters.
layers[i] = conf.getLayer().instantiate(conf, listeners, i, paramsView, initializeParams);
// Also index the layer by its configured name for lookup-by-name access.
layerMap.put(conf.getLayer().getLayerName(), layers[i]);