@Override
public int numParams(Layer l) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) l;

    int[] kernel = layerConf.getKernelSize();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();
    // One weight per (input map, output map, kernel row, kernel column), plus one bias per output map
    return nIn * nOut * kernel[0] * kernel[1] + nOut;
}
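To sanity-check the formula, here is a minimal standalone sketch; the layer sizes (3 input channels, 16 output maps, 5x5 kernel) are hypothetical, chosen only to make the arithmetic concrete.

public class ConvParamCountExample {
    public static void main(String[] args) {
        int nIn = 3;            // input channels (e.g. RGB)
        int nOut = 16;          // output feature maps
        int[] kernel = {5, 5};  // kernel rows x columns

        // Same formula as numParams() above
        int numParams = nIn * nOut * kernel[0] * kernel[1] + nOut;
        System.out.println(numParams); // 3 * 16 * 5 * 5 + 16 = 1216
    }
}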
// Summary strings for the UI: compact (mainLine/subLine) and detailed HTML (fullLine)
mainLine.append("K: ").append(Arrays.toString(layer1.getKernelSize()))
        .append(" S: ").append(Arrays.toString(layer1.getStride()))
        .append(" P: ").append(Arrays.toString(layer1.getPadding()));
subLine.append("nIn/nOut: [").append(layer1.getNIn()).append("/").append(layer1.getNOut()).append("]");
fullLine.append("Kernel size: ").append(Arrays.toString(layer1.getKernelSize())).append("<br/>");
fullLine.append("Stride: ").append(Arrays.toString(layer1.getStride())).append("<br/>");
fullLine.append("Padding: ").append(Arrays.toString(layer1.getPadding())).append("<br/>");
fullLine.append("Number of inputs: ").append(layer1.getNIn()).append("<br/>");
fullLine.append("Number of outputs: ").append(layer1.getNOut()).append("<br/>");
} else if (layer.conf().getLayer() instanceof SubsamplingLayer) {
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();

    int[] kernel = layerConf.getKernelSize();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();

    // The flattened gradient view is laid out [bias | weights]: the biases occupy the first nOut
    // elements, and the rest are the weights in 'c' (row-major) order, shaped [nOut, nIn, kH, kW]
    INDArray biasGradientView = gradientView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, nOut));
    INDArray weightGradientView = gradientView
            .get(NDArrayIndex.point(0), NDArrayIndex.interval(nOut, numParams(conf)))
            .reshape('c', nOut, nIn, kernel[0], kernel[1]);

    Map<String, INDArray> out = new LinkedHashMap<>();
    out.put(BIAS_KEY, biasGradientView);
    out.put(WEIGHT_KEY, weightGradientView);
    return out;
}
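The [bias | weights] layout this method assumes can be illustrated with plain ND4J. A minimal sketch with hypothetical sizes (2 input maps, 4 output maps, 3x3 kernel):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;

public class FlattenedViewExample {
    public static void main(String[] args) {
        int nIn = 2, nOut = 4, kH = 3, kW = 3;
        int numParams = nIn * nOut * kH * kW + nOut; // 2*4*3*3 + 4 = 76

        INDArray flat = Nd4j.linspace(1, numParams, numParams).reshape('c', 1, numParams);

        // First nOut elements: per-output-map biases
        INDArray bias = flat.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, nOut));
        // Remainder: weights, viewed as [nOut, nIn, kH, kW] in row-major ('c') order
        INDArray weights = flat.get(NDArrayIndex.point(0), NDArrayIndex.interval(nOut, numParams))
                .reshape('c', nOut, nIn, kH, kW);

        System.out.println(bias);                                         // [1.0, 2.0, 3.0, 4.0]
        System.out.println(java.util.Arrays.toString(weights.shape()));   // [4, 2, 3, 3]
    }
}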
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
    if (layerConf.getKernelSize().length != 2)
        throw new IllegalArgumentException("Kernel size must be an array of length 2 (rows, columns)");

    Map<String, INDArray> params = Collections.synchronizedMap(new LinkedHashMap<String, INDArray>());

    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();

    // Same [bias | weights] layout as the gradient view, so parameters and their gradients share offsets
    INDArray biasView = paramsView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, nOut));
    INDArray weightView = paramsView.get(NDArrayIndex.point(0), NDArrayIndex.interval(nOut, numParams(conf)));

    params.put(BIAS_KEY, createBias(conf, biasView, initializeParams));
    params.put(WEIGHT_KEY, createWeightMatrix(conf, weightView, initializeParams));
    conf.addVariable(WEIGHT_KEY);
    conf.addVariable(BIAS_KEY);
    return params;
}
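Note that init() hands out views rather than copies: writes through a view land directly in the backing flattened array, which keeps all of a network's parameters in one contiguous buffer. A minimal ND4J sketch of that behavior (sizes hypothetical):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;

public class ParamViewExample {
    public static void main(String[] args) {
        INDArray paramsView = Nd4j.zeros(1, 10);
        INDArray biasView = paramsView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, 4));

        biasView.assign(7); // initialize the "biases" in place
        System.out.println(paramsView); // first 4 entries are now 7 - no copy was made
    }
}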
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<IterationListener> iterationListeners,
                int layerIndex, INDArray layerParamsView, boolean initializeParams) {
    LayerValidation.assertNInNOutSet("ConvolutionLayer", getLayerName(), layerIndex, getNIn(), getNOut());

    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer ret =
            new org.deeplearning4j.nn.layers.convolution.ConvolutionLayer(conf);
    ret.setListeners(iterationListeners);
    ret.setIndex(layerIndex);
    // The layer's parameters are views into the network-level flattened array, so updates propagate
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
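For context, instantiate() is not normally called directly; it runs when a network is initialized. A sketch of that path, assuming a recent DL4J builder API (details vary across versions, and the layer sizes are purely illustrative):

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ConvInstantiationExample {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                // 5x5 convolution: 1 input channel, 20 output maps
                .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(1).nOut(20).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nOut(10).build())
                .setInputType(InputType.convolutionalFlat(28, 28, 1))
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init(); // drives instantiate() and the param initializer for each layer

        System.out.println(net.getLayer(0).numParams()); // 1*20*5*5 + 20 = 520
    }
}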