/**
 * Calculate the activation of layer {@code curr}, applying the layer's input
 * pre-processor first where one is configured.
 *
 * @param curr     index of the current layer
 * @param input    input to that layer (activations of the previous layer)
 * @param training whether the network is in training or test mode
 * @return the activations produced by layer {@code curr}
 */
public INDArray activationFromPrevLayer(int curr, INDArray input, boolean training) {
    InputPreProcessor preProcessor = getLayerWiseConfigurations().getInputPreProcess(curr);
    INDArray processed = (preProcessor == null)
            ? input
            : preProcessor.preProcess(input, getInputMiniBatchSize());
    return layers[curr].activate(processed, training);
}
/**
 * Compute the pre-output (linear transformation, z) of the output layer by feeding
 * {@code x} forward through all hidden layers. Each layer's input pre-processor is
 * applied before that layer, including before the final layer's {@code preOutput} call.
 *
 * @param x the network input
 * @return the output layer's pre-activation for the given input
 */
@Override
public INDArray preOutput(INDArray x) {
    INDArray current = x;
    int lastIdx = layers.length - 1;

    // Fully activate every hidden layer (all but the last).
    for (int i = 0; i < lastIdx; i++) {
        InputPreProcessor pre = getLayerWiseConfigurations().getInputPreProcess(i);
        if (pre != null) {
            current = pre.preProcess(current, getInputMiniBatchSize());
        }
        current = layers[i].activate(current);
    }

    // Pre-process for the output layer, then take its linear pre-activation only.
    InputPreProcessor lastPre = getLayerWiseConfigurations().getInputPreProcess(lastIdx);
    if (lastPre != null) {
        current = lastPre.preProcess(current, getInputMiniBatchSize());
    }
    return layers[lastIdx].preOutput(current);
}
/**
 * Compute the linear pre-activation (z) of layer {@code curr} from the previous
 * layer's output, applying the layer's input pre-processor where necessary.
 *
 * @param curr     index of the current layer
 * @param input    the input (activations of the previous layer)
 * @param training whether the network is in training or test mode
 * @return the pre-activation (z) of layer {@code curr}
 */
public INDArray zFromPrevLayer(int curr, INDArray input, boolean training) {
    InputPreProcessor pre = getLayerWiseConfigurations().getInputPreProcess(curr);
    if (pre != null) {
        // NOTE(review): minibatch size is taken from input.size(0) here, whereas
        // activationFromPrevLayer uses getInputMiniBatchSize() — confirm intentional.
        input = pre.preProcess(input, input.size(0));
    }
    return layers[curr].preOutput(input, training);
}
/**
 * Compute the activations of every layer, from the network input through to the
 * output layer, applying the first layer's input pre-processor where configured.
 *
 * @param input the network input
 * @return the list of activations, one entry per layer
 * @throws IllegalStateException if {@code input} is null
 */
public List<INDArray> feedForward(INDArray input) {
    if (input == null) {
        throw new IllegalStateException("Unable to perform feed forward; no input found");
    }
    InputPreProcessor pre = this.getLayerWiseConfigurations().getInputPreProcess(0);
    setInput(pre == null ? input : pre.preProcess(input, input.size(0)));
    return feedForward();
}
if (getLayerWiseConfigurations().getInputPreProcess(i) != null) currInput = getLayerWiseConfigurations().getInputPreProcess(i).preProcess(currInput, input.size(0)); if (layers[i] instanceof RecurrentLayer) { currInput = ((RecurrentLayer) layers[i]).rnnActivateUsingStoredState(currInput, training,
/**
 * Compute the pre-activations (z values) of every layer, from the network input
 * through to the output layer, applying the first layer's input pre-processor
 * where configured.
 *
 * @param input    the network input
 * @param training whether the network is in training or test mode
 * @return the list of pre-activations, one entry per layer
 * @throws IllegalStateException if {@code input} is null
 */
public List<INDArray> computeZ(INDArray input, boolean training) {
    if (input == null) {
        throw new IllegalStateException("Unable to perform feed forward; no input found");
    }
    InputPreProcessor pre = this.getLayerWiseConfigurations().getInputPreProcess(0);
    setInput(pre == null ? input : pre.preProcess(input, getInputMiniBatchSize()));
    return computeZ(training);
}
/** * Get a {@link MemoryReport} for the given MultiLayerConfiguration. This is used to estimate the * memory requirements for the given network configuration and input * * @param inputType Input types for the network * @return Memory report for the network */ public NetworkMemoryReport getMemoryReport(InputType inputType) { Map<String, MemoryReport> memoryReportMap = new LinkedHashMap<>(); int nLayers = confs.size(); for (int i = 0; i < nLayers; i++) { String layerName = confs.get(i).getLayer().getLayerName(); if (layerName == null) { layerName = String.valueOf(i); } //Pass input type through preprocessor, if necessary InputPreProcessor preproc = getInputPreProcess(0); //TODO memory requirements for preprocessor if (preproc != null) { inputType = preproc.getOutputType(inputType); } LayerMemoryReport report = confs.get(i).getLayer().getMemoryReport(inputType); memoryReportMap.put(layerName, report); inputType = confs.get(i).getLayer().getOutputType(i, inputType); } return new NetworkMemoryReport(memoryReportMap, MultiLayerConfiguration.class, "MultiLayerNetwork", inputType); }
if (getLayerWiseConfigurations().getInputPreProcess(numLayers - 1) != null) currPair = new Pair<>(currPair.getFirst(), this.layerWiseConfigurations.getInputPreProcess(numLayers - 1) .backprop(currPair.getSecond(), getInputMiniBatchSize())); if (getLayerWiseConfigurations().getInputPreProcess(j) != null) currPair = new Pair<>(currPair.getFirst(), getLayerWiseConfigurations().getInputPreProcess(j) .backprop(currPair.getSecond(), getInputMiniBatchSize()));
if (layerWiseConfigurations.getInputPreProcess(layers.length - 1) != null) actSecondLastLayer = layerWiseConfigurations.getInputPreProcess(layers.length - 1) .preProcess(actSecondLastLayer, getInputMiniBatchSize()); getOutputLayer().setInput(actSecondLastLayer);
boolean inputIs2d = input.rank() == 2; for (int i = 0; i < layers.length; i++) { if (getLayerWiseConfigurations().getInputPreProcess(i) != null) input = getLayerWiseConfigurations().getInputPreProcess(i).preProcess(input, getInputMiniBatchSize()); if (layers[i] instanceof RecurrentLayer) { input = ((RecurrentLayer) layers[i]).rnnTimeStep(input);
currPair.getFirst().flatteningOrderForVariable(origName))); if (getLayerWiseConfigurations().getInputPreProcess(numLayers - 1) != null) currPair = new Pair<>(currPair.getFirst(), this.layerWiseConfigurations.getInputPreProcess(numLayers - 1) .backprop(currPair.getSecond(), getInputMiniBatchSize())); if (getLayerWiseConfigurations().getInputPreProcess(j) != null) currPair = new Pair<>(currPair.getFirst(), getLayerWiseConfigurations().getInputPreProcess(j) .backprop(currPair.getSecond(), getInputMiniBatchSize()));
IOutputLayer ol = (IOutputLayer) getOutputLayer(); INDArray olInput = activations.get(n - 1); if (getLayerWiseConfigurations().getInputPreProcess(n - 1) != null) { olInput = getLayerWiseConfigurations().getInputPreProcess(n - 1).preProcess(olInput, input.size(0));
InputPreProcessor preProcessor = getLayerWiseConfigurations().getInputPreProcess(i);
if (layerIdx == 0 && getLayerWiseConfigurations().getInputPreProcess(0) != null) { layerInput = getLayerWiseConfigurations().getInputPreProcess(0).preProcess(input, input.size(0));
layer = getLayer(i); if (i == 0) { if (getLayerWiseConfigurations().getInputPreProcess(i) != null) { layerInput = getLayerWiseConfigurations().getInputPreProcess(i) .preProcess(input, miniBatchSize) .leverageTo(ComputationGraph.workspacePretrain);