/**
 * Get a given layer by name.
 *
 * @param name Name of the layer to return
 * @return The layer with the given name
 */
public Layer getLayer(String name) {
    return verticesMap.get(name).getLayer(); //TODO checks: throws NPE if no vertex with this name exists
}
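// A minimal usage sketch, not from the source: look up a layer by its vertex name.
// The graph "net" and the layer name "lstm" are assumptions for illustration only.
Layer lstm = net.getLayer("lstm");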
/**
 * Get the state of the RNN layer, as used in {@link #rnnTimeStep(INDArray...)}.
 *
 * @param layerName Name of the layer
 * @return Hidden state, or null if the layer is not an RNN layer
 */
public Map<String, INDArray> rnnGetPreviousState(String layerName) {
    Layer l = verticesMap.get(layerName).getLayer();
    if (l == null || !(l instanceof RecurrentLayer))
        return null;
    return ((RecurrentLayer) l).rnnGetPreviousState();
}
/**
 * Set the state of the RNN layer, for use in {@link #rnnTimeStep(INDArray...)}
 *
 * @param layerName The name of the layer
 * @param state     The state to set the specified layer to
 */
public void rnnSetPreviousState(String layerName, Map<String, INDArray> state) {
    Layer l = verticesMap.get(layerName).getLayer();
    if (l == null || !(l instanceof RecurrentLayer)) {
        throw new UnsupportedOperationException(
                        "Layer \"" + layerName + "\" is not a recurrent layer. Cannot set state");
    }
    ((RecurrentLayer) l).rnnSetPreviousState(state);
}
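// A hedged usage sketch (not from the source) showing how the two methods above pair with
// rnnTimeStep(...): the stored hidden state can be snapshotted, cleared, and restored between
// calls. "net" (an initialized ComputationGraph) and the layer name "lstm" are assumptions;
// the input shape is illustrative only.
INDArray step = Nd4j.rand(new int[] {1, 10, 1});               //[miniBatchSize, inSize, 1 time step]
net.rnnTimeStep(step);                                         //Forward pass; hidden state stored internally

Map<String, INDArray> saved = net.rnnGetPreviousState("lstm"); //Snapshot the hidden state
net.rnnClearPreviousState();                                   //Reset stored state for all RNN layers
net.rnnSetPreviousState("lstm", saved);                        //Restore the snapshot before the next step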
/**
 * Get the parameters for the ComputationGraph
 *
 * @param backwardOnly If true: return backprop parameters only (i.e., no visible layer biases
 *                     used in layerwise pretraining layers)
 */
public INDArray params(boolean backwardOnly) {
    if (backwardOnly)
        return flattenedParams;

    List<INDArray> list = new ArrayList<>(layers.length);
    for (int i = 0; i < topologicalOrder.length; i++) {
        if (!vertices[topologicalOrder[i]].hasLayer())
            continue;

        Layer l = vertices[topologicalOrder[i]].getLayer();
        INDArray layerParams = l.params();
        if (layerParams != null)
            list.add(layerParams); //May be null: subsampling etc. layers have no parameters
    }
    return Nd4j.toFlattened('f', list);
}
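// Illustrative only, assuming an initialized ComputationGraph "net": params(true) returns the
// existing 1d view array directly (no copy), whereas params(false) flattens the per-layer
// parameter arrays (including any pretrain-only parameters) into a new array.
INDArray backpropParams = net.params(true);   //The flattened backprop view
INDArray allParams = net.params(false);       //Freshly flattened, in topological layer order
System.out.println("Backprop params: " + backpropParams.length() + ", all: " + allParams.length());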
/**
 * Pretrain network with multiple inputs and/or outputs
 */
public void pretrain(MultiDataSetIterator iter) {
    if (!configuration.isPretrain())
        return;
    if (flattenedGradients == null) {
        initGradientsView();
    }

    //Assume here that all layers are pretrainable layers
    for (int i = 0; i < topologicalOrder.length; i++) {
        if (!vertices[i].hasLayer())
            continue;
        if (vertices[i].getLayer() instanceof IOutputLayer)
            continue; //Don't pretrain output layer
        if (!vertices[i].getLayer().isPretrainLayer())
            continue; //Skip layers that aren't pretrainable

        pretrainLayer(vertices[i].getVertexName(), iter);
    }
}
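// A minimal sketch, not from the source: pretraining is driven layer by layer via
// pretrainLayer(name, iter), as the loop above shows. "net", the iterator "trainData", and the
// layer name "vae" (e.g. a variational autoencoder vertex) are assumed to exist elsewhere.
net.pretrain(trainData);              //Pretrains every pretrainable, non-output layer
net.pretrainLayer("vae", trainData);  //Or: pretrain a single named layer directly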
@Override
protected Layer[] getOrderedLayers() {
    if (orderedLayers != null) {
        return orderedLayers;
    }
    GraphVertex[] vertices = network.getVertices();

    //In CompGraph: we need to know the topological ordering, so we know how parameters are laid out in the 1d view arrays
    int[] topologicalOrdering = network.topologicalSortOrder();

    Layer[] out = new Layer[network.getNumLayers()];
    int j = 0;
    for (int i = 0; i < topologicalOrdering.length; i++) {
        GraphVertex currentVertex = vertices[topologicalOrdering[i]];
        if (!currentVertex.hasLayer()) {
            continue;
        }
        out[j++] = currentVertex.getLayer();
    }
    orderedLayers = out;
    return orderedLayers;
}
private void copyOrigParamsToSubsetGraph() {
    //Copy parameters from the original graph into the corresponding layers of the unfrozen subset graph
    for (GraphVertex aVertex : unFrozenSubsetGraph.getVertices()) {
        if (!aVertex.hasLayer())
            continue;
        aVertex.getLayer().setParams(origGraph.getLayer(aVertex.getVertexName()).params());
    }
}
@Override
public void setBackpropGradientsViewArray(INDArray gradient) {
    int paramsSoFar = 0;
    for (int i = 0; i < topologicalOrder.length; i++) {
        if (!vertices[topologicalOrder[i]].hasLayer())
            continue;

        Layer layer = vertices[topologicalOrder[i]].getLayer();
        int range = layer.numParams();
        if (range <= 0)
            continue; //Some layers: no parameters (subsampling, etc.)
        layer.setBackpropGradientsViewArray(gradient.get(NDArrayIndex.point(0),
                        NDArrayIndex.interval(paramsSoFar, paramsSoFar + range)));
        paramsSoFar += range;
    }
}
Layer ol = v.getLayer();
ol.setMaskArray(labelMaskArrays[i]);
GraphVertex gv = verticesMap.get(s);
score += ((IOutputLayer) gv.getLayer()).computeScore(l1, l2, true);
INDArray out;
if (current.hasLayer()) {
    Layer l = current.getLayer();
    if (l instanceof RecurrentLayer) {
        out = ((RecurrentLayer) l).rnnActivateUsingStoredState(current.getInputs()[0], training,
                        storeLastForTBPTT);
@Override
public void setParams(INDArray params) {
    if (params == flattenedParams)
        return; //No op

    if (this.flattenedParams != null && this.flattenedParams.length() == params.length()) {
        this.flattenedParams.assign(params);
        return;
    }

    int idx = 0;
    for (int i = 0; i < topologicalOrder.length; i++) {
        if (!vertices[topologicalOrder[i]].hasLayer())
            continue;

        Layer layer = vertices[topologicalOrder[i]].getLayer();
        int range = layer.numParams();
        if (range <= 0)
            continue; //Some layers: no parameters (subsampling, etc.)
        INDArray get = params.get(NDArrayIndex.point(0), NDArrayIndex.interval(idx, range + idx));
        layer.setParams(get);
        idx += range;
    }
}
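// A hedged round-trip sketch (not from the source), assuming an initialized graph "net":
// read the flattened parameters, perturb a copy, and write it back; setParams distributes
// the values to each layer in topological order.
INDArray p = net.params(true).dup(); //dup(): avoid mutating the internal view array
p.addi(0.01);                        //Example in-place perturbation
net.setParams(p);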
            continue;

        org.deeplearning4j.nn.api.Layer layer = vertices[topologicalOrder[i]].getLayer();
        String layerName = vertices[topologicalOrder[i]].getVertexName();
        int range = layer.numParams();

if (gv.hasLayer()) {
    org.deeplearning4j.nn.api.Layer l = gv.getLayer();
    gv.setLayerAsFrozen();

    //Also update the layers array, so it points to the new frozen layer
    for (int j = 0; j < layers.length; j++) {
        if (layers[j] == l) {
            layers[j] = gv.getLayer(); //Place the new frozen layer to replace the original layer
            break;
int i = 0;
for (String s : configuration.getNetworkOutputs()) {
    Layer outLayer = verticesMap.get(s).getLayer();
    if (outLayer == null || !(outLayer instanceof IOutputLayer)) {
        throw new UnsupportedOperationException(
Layer outLayer = verticesMap.get(s).getLayer();
if (outLayer == null || !(outLayer instanceof IOutputLayer)) {
    log.warn("Cannot calculate score: vertex \"" + s + "\" is not an output layer");
Layer layer = gv.getLayer();
if (current.hasLayer()) {
    Layer l = current.getLayer();
    if (l instanceof RecurrentLayer) {
        out = ((RecurrentLayer) l).rnnTimeStep(current.getInputs()[0]);