/**
 * Returns the name of this layer, delegating to the wrapped backend layer.
 *
 * @return the layer name held by the backend configuration
 */
@OptionMetadata(
        displayName = "layer name",
        description = "The name of the layer (default = Batch normalization Layer).",
        commandLineParamName = "name",
        commandLineParamSynopsis = "-name <string>",
        displayOrder = 0
)
public String getLayerName() {
    return backend.getLayerName();
}
public LayerUpdater(Layer layer, INDArray updaterState) { super(layer, updaterState); if (layer instanceof MultiLayerNetwork) { throw new UnsupportedOperationException("Cannot use LayerUpdater for a MultiLayerNetwork"); } layersByName = new HashMap<>(); layersByName.put(layer.conf().getLayer().getLayerName(), layer); }
public Map<String, INDArray> paramTable(boolean backpropParamsOnly) { //Get all parameters from all layers Map<String, INDArray> allParams = new LinkedHashMap<>(); for (Layer layer : layers) { Map<String, INDArray> paramMap = layer.paramTable(backpropParamsOnly); for (Map.Entry<String, INDArray> entry : paramMap.entrySet()) { String newKey = layer.conf().getLayer().getLayerName() + "_" + entry.getKey(); allParams.put(newKey, entry.getValue()); } } return allParams; }
/**
 * Creates an updater for a computation graph, indexing each layer by its name.
 *
 * @param graph        the graph whose layers should be updated
 * @param updaterState flattened updater state array
 */
public ComputationGraphUpdater(ComputationGraph graph, INDArray updaterState) {
    super(graph, updaterState);
    layersByName = new HashMap<>();
    // Index layers by name, in topological order
    for (Layer layer : getOrderedLayers()) {
        layersByName.put(layer.conf().getLayer().getLayerName(), layer);
    }
}
/**
 * Builds a short identifier string for this layer (name + index), for use in
 * log and exception messages.
 *
 * @return identifier of the form "(layer name: NAME, layer index: IDX)"
 */
protected String layerId() {
    String layerName = this.conf().getLayer().getLayerName();
    // An unnamed layer is shown as an empty quoted string
    String display = (layerName == null) ? "\"\"" : layerName;
    return "(layer name: " + display + ", layer index: " + index + ")";
}
/**
 * Builds a short identifier string for this layer (name + index), for use in
 * log and exception messages.
 *
 * @return identifier of the form "(layer name: NAME, layer index: IDX)"
 */
protected String layerId() {
    String layerName = this.conf().getLayer().getLayerName();
    // An unnamed layer is shown as an empty quoted string
    String display = (layerName == null) ? "\"\"" : layerName;
    return "(layer name: " + display + ", layer index: " + index + ")";
}
public static GraphInfo buildGraphInfo(MultiLayerConfiguration config) { List<String> vertexNames = new ArrayList<>(); List<String> originalVertexName = new ArrayList<>(); List<String> layerTypes = new ArrayList<>(); List<List<Integer>> layerInputs = new ArrayList<>(); List<Map<String, String>> layerInfo = new ArrayList<>(); vertexNames.add("Input"); originalVertexName.add(null); layerTypes.add("Input"); layerInputs.add(Collections.emptyList()); layerInfo.add(Collections.emptyMap()); List<NeuralNetConfiguration> list = config.getConfs(); int layerIdx = 1; for (NeuralNetConfiguration c : list) { Layer layer = c.getLayer(); String layerName = layer.getLayerName(); if (layerName == null) layerName = "layer" + layerIdx; vertexNames.add(layerName); originalVertexName.add(String.valueOf(layerIdx - 1)); String layerType = c.getLayer().getClass().getSimpleName().replaceAll("Layer$", ""); layerTypes.add(layerType); layerInputs.add(Collections.singletonList(layerIdx - 1)); layerIdx++; //Extract layer info Map<String, String> map = getLayerInfo(c, layer); layerInfo.add(map); } return new GraphInfo(vertexNames, layerTypes, layerInputs, layerInfo, originalVertexName); }
public static GraphInfo buildGraphInfo(MultiLayerConfiguration config) { List<String> vertexNames = new ArrayList<>(); List<String> originalVertexName = new ArrayList<>(); List<String> layerTypes = new ArrayList<>(); List<List<Integer>> layerInputs = new ArrayList<>(); List<Map<String, String>> layerInfo = new ArrayList<>(); vertexNames.add("Input"); originalVertexName.add(null); layerTypes.add("Input"); layerInputs.add(Collections.emptyList()); layerInfo.add(Collections.emptyMap()); List<NeuralNetConfiguration> list = config.getConfs(); int layerIdx = 1; for (NeuralNetConfiguration c : list) { Layer layer = c.getLayer(); String layerName = layer.getLayerName(); if (layerName == null) layerName = "layer" + layerIdx; vertexNames.add(layerName); originalVertexName.add(String.valueOf(layerIdx - 1)); String layerType = c.getLayer().getClass().getSimpleName().replaceAll("Layer$", ""); layerTypes.add(layerType); layerInputs.add(Collections.singletonList(layerIdx - 1)); layerIdx++; //Extract layer info Map<String, String> map = getLayerInfo(c, layer); layerInfo.add(map); } return new GraphInfo(vertexNames, layerTypes, layerInputs, layerInfo, originalVertexName); }
public static GraphInfo buildGraphInfo(MultiLayerConfiguration config) { List<String> vertexNames = new ArrayList<>(); List<String> originalVertexName = new ArrayList<>(); List<String> layerTypes = new ArrayList<>(); List<List<Integer>> layerInputs = new ArrayList<>(); List<Map<String, String>> layerInfo = new ArrayList<>(); vertexNames.add("Input"); originalVertexName.add(null); layerTypes.add("Input"); layerInputs.add(Collections.emptyList()); layerInfo.add(Collections.emptyMap()); List<NeuralNetConfiguration> list = config.getConfs(); int layerIdx = 1; for (NeuralNetConfiguration c : list) { Layer layer = c.getLayer(); String layerName = layer.getLayerName(); if (layerName == null) layerName = "layer" + layerIdx; vertexNames.add(layerName); originalVertexName.add(String.valueOf(layerIdx - 1)); String layerType = c.getLayer().getClass().getSimpleName().replaceAll("Layer$", ""); layerTypes.add(layerType); layerInputs.add(Collections.singletonList(layerIdx - 1)); layerIdx++; //Extract layer info Map<String, String> map = getLayerInfo(c, layer); layerInfo.add(map); } return new GraphInfo(vertexNames, layerTypes, layerInputs, layerInfo, originalVertexName); }
/**
 * Get a map of states for ALL RNN layers, as used in {@link #rnnTimeStep(INDArray...)}.
 * Layers that are not RNN layers will not have an entry in the returned map.
 *
 * @return Map of states (keyed by layer name); non-RNN layers are omitted
 * @see #rnnSetPreviousStates(Map)
 */
public Map<String, Map<String, INDArray>> rnnGetPreviousStates() {
    Map<String, Map<String, INDArray>> states = new HashMap<>();
    for (Layer l : layers) {
        // Only recurrent layers carry time-step state
        if (!(l instanceof RecurrentLayer)) {
            continue;
        }
        String name = l.conf().getLayer().getLayerName();
        states.put(name, ((RecurrentLayer) l).rnnGetPreviousState());
    }
    return states;
}
/**
 * Set the state of the RNN layer, for use in {@link #rnnTimeStep(INDArray...)}
 *
 * @param layer The number/index of the layer.
 * @param state The state to set the specified layer to
 */
public void rnnSetPreviousState(int layer, Map<String, INDArray> state) {
    // Resolve the layer index to its configured name, then delegate
    String layerName = layers[layer].conf().getLayer().getLayerName();
    rnnSetPreviousState(layerName, state);
}
/**
 * Get the state of the RNN layer, as used in {@link #rnnTimeStep(INDArray...)}.
 *
 * @param layer Number/index of the layer.
 * @return Hidden state, or null if layer is not an RNN layer
 */
public Map<String, INDArray> rnnGetPreviousState(int layer) {
    // Resolve the layer index to its configured name, then delegate
    String layerName = layers[layer].conf().getLayer().getLayerName();
    return rnnGetPreviousState(layerName);
}
/**
 * Builds a short identifier string for the wrapped layer (name + index), for
 * use in log and exception messages.
 *
 * @return identifier of the form "(layer name: NAME, layer index: IDX)"
 */
protected String layerId() {
    String layerName = insideLayer.conf().getLayer().getLayerName();
    // An unnamed layer is shown as an empty quoted string
    String display = (layerName == null) ? "\"\"" : layerName;
    return "(layer name: " + display + ", layer index: " + insideLayer.getIndex() + ")";
}
private MultiLayerConfiguration constructConf() { //use the editedConfs list to make a new config List<NeuralNetConfiguration> allConfs = new ArrayList<>(); allConfs.addAll(editedConfs); allConfs.addAll(appendConfs); //Set default layer names, if not set - as per NeuralNetConfiguration.ListBuilder.build() for (int i = 0; i < allConfs.size(); i++) { if (allConfs.get(i).getLayer().getLayerName() == null) { allConfs.get(i).getLayer().setLayerName("layer" + i); } } MultiLayerConfiguration conf = new MultiLayerConfiguration.Builder().inputPreProcessors(inputPreProcessors) .setInputType(this.inputType).confs(allConfs).build(); if (finetuneConfiguration != null) { finetuneConfiguration.applyToMultiLayerConfiguration(conf); } return conf; } }
/** * Get a {@link MemoryReport} for the given MultiLayerConfiguration. This is used to estimate the * memory requirements for the given network configuration and input * * @param inputType Input types for the network * @return Memory report for the network */ public NetworkMemoryReport getMemoryReport(InputType inputType) { Map<String, MemoryReport> memoryReportMap = new LinkedHashMap<>(); int nLayers = confs.size(); for (int i = 0; i < nLayers; i++) { String layerName = confs.get(i).getLayer().getLayerName(); if (layerName == null) { layerName = String.valueOf(i); } //Pass input type through preprocessor, if necessary InputPreProcessor preproc = getInputPreProcess(0); //TODO memory requirements for preprocessor if (preproc != null) { inputType = preproc.getOutputType(inputType); } LayerMemoryReport report = confs.get(i).getLayer().getMemoryReport(inputType); memoryReportMap.put(layerName, report); inputType = confs.get(i).getLayer().getOutputType(i, inputType); } return new NetworkMemoryReport(memoryReportMap, MultiLayerConfiguration.class, "MultiLayerNetwork", inputType); }
/**
 * Copy Keras layer weights to DL4J Layer.
 *
 * @param layer DL4J layer to receive the stored Keras weights
 * @throws InvalidKerasConfigurationException if the stored weights and the
 *         layer's parameter table do not match exactly
 */
public void copyWeightsToLayer(org.deeplearning4j.nn.api.Layer layer) throws InvalidKerasConfigurationException {
    if (this.getNumParams() <= 0) {
        return; // nothing to copy for parameterless layers
    }

    String dl4jLayerName = layer.conf().getLayer().getLayerName();
    String kerasLayerName = this.getLayerName();
    String msg = "Error when attempting to copy weights from Keras layer " + kerasLayerName + " to DL4J layer "
            + dl4jLayerName;

    if (this.weights == null) {
        throw new InvalidKerasConfigurationException(msg + "(weights is null)");
    }

    Set<String> paramsInLayer = new HashSet<String>(layer.paramTable().keySet());
    Set<String> paramsInKerasLayer = new HashSet<String>(this.weights.keySet());

    /* Check for parameters in layer for which we don't have weights. */
    paramsInLayer.removeAll(paramsInKerasLayer);
    if (!paramsInLayer.isEmpty()) {
        String paramName = paramsInLayer.iterator().next();
        throw new InvalidKerasConfigurationException(msg + "(no stored weights for parameter " + paramName + ")");
    }

    /* Check for parameters NOT in layer for which we DO have weights. */
    paramsInKerasLayer.removeAll(layer.paramTable().keySet());
    if (!paramsInKerasLayer.isEmpty()) {
        String paramName = paramsInKerasLayer.iterator().next();
        throw new InvalidKerasConfigurationException(msg + "(found no parameter named " + paramName + ")");
    }

    /* Copy weights. */
    for (String paramName : layer.paramTable().keySet()) {
        layer.setParam(paramName, this.weights.get(paramName));
    }
}
// NOTE(review): fragment of a larger method (throw statement is cut off here) — looks up the
// DL4J layer's name and verifies it is registered in this.layers before proceeding; confirm
// against the full method body.
String layerName = layer.conf().getLayer().getLayerName(); if (!this.layers.containsKey(layerName)) throw new InvalidKerasConfigurationException(
// Assign a default name ("layer" + index) to any layer left unnamed — mirrors
// NeuralNetConfiguration.ListBuilder.build(). (Fragment: closing brace is outside this view.)
if (layerwise.get(i).getLayer().getLayerName() == null) { layerwise.get(i).getLayer().setLayerName("layer" + i);
/**
 * Applies the builder's global configuration to a single layer: resolves a
 * display name, validates the learning rate, copies shared settings, and
 * fills in any unset convolution modes.
 *
 * @param layer the layer to configure; may be null (validation still runs)
 */
private void configureLayer(Layer layer) {
    // Resolve a human-readable name for validation/error messages
    String layerName = (layer == null || layer.getLayerName() == null)
            ? "Layer not named"
            : layer.getLayerName();

    learningRateValidation(layerName);

    if (layer != null) {
        copyConfigToLayer(layerName, layer);
    }

    // Frozen layers wrap another layer; the inner layer needs the config too
    if (layer instanceof FrozenLayer) {
        copyConfigToLayer(layerName, ((FrozenLayer) layer).getLayer());
    }

    // Default the convolution mode where the layer did not set one explicitly
    if (layer instanceof ConvolutionLayer) {
        ConvolutionLayer convLayer = (ConvolutionLayer) layer;
        if (convLayer.getConvolutionMode() == null) {
            convLayer.setConvolutionMode(convolutionMode);
        }
    }
    if (layer instanceof SubsamplingLayer) {
        SubsamplingLayer subsampling = (SubsamplingLayer) layer;
        if (subsampling.getConvolutionMode() == null) {
            subsampling.setConvolutionMode(convolutionMode);
        }
    }

    LayerValidation.generalValidation(layerName, layer, useRegularization, useDropConnect, dropOut, l2, l2Bias,
            l1, l1Bias, dist);
}
// NOTE(review): fragment of a larger method — captures this layer's configured name
// (may be null for unnamed layers); usage is outside this view.
String lName = l.getLayerName();