@OptionMetadata(
        displayName = "number of rows in kernel",
        description = "The number of rows in the kernel (default = 5).",
        commandLineParamName = "rows",
        commandLineParamSynopsis = "-rows <int>",
        displayOrder = 4)
public int getKernelSizeX() {
    // Row count is the first entry of the backend's kernel-size array.
    int[] kernelDims = backend.getKernelSize();
    return kernelDims[0];
}
// Programmatic (non-GUI) accessor exposing the backend's full kernel-size array.
@ProgrammaticProperty
public int[] getKernelSize() {
    int[] kernelDims = backend.getKernelSize();
    return kernelDims;
}
@OptionMetadata(
        displayName = "number of columns in kernel",
        description = "The number of columns in the kernel (default = 5).",
        commandLineParamName = "columns",
        commandLineParamSynopsis = "-columns <int>",
        displayOrder = 5)
public int getKernelSizeY() {
    // Column count is the second entry of the backend's kernel-size array.
    int[] kernelDims = backend.getKernelSize();
    return kernelDims[1];
}
/**
 * Get the height and width from the configuration.
 *
 * Delegates to the array-based overload using the configured layer's kernel size.
 * Assumes {@code conf.getLayer()} is a ConvolutionLayer — a ClassCastException is
 * thrown otherwise.
 *
 * @param conf the configuration to get height and width from
 * @return the kernel height and width taken from the configured convolution layer
 */
public static int[] getHeightAndWidth(NeuralNetConfiguration conf) { return getHeightAndWidth( ((org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer()).getKernelSize()); }
/**
 * Computes the total parameter count for a convolution layer:
 * one weight per (nIn, nOut, kernelRow, kernelCol) combination plus one bias per output map.
 *
 * @param l the layer configuration; must be a ConvolutionLayer
 * @return number of parameters (weights + biases)
 */
@Override
public int numParams(Layer l) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer convConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) l;
    int[] kernelDims = convConf.getKernelSize();
    int depthIn = convConf.getNIn();
    int depthOut = convConf.getNOut();
    int weightCount = depthIn * depthOut * kernelDims[0] * kernelDims[1];
    int biasCount = depthOut;
    return weightCount + biasCount;
}
// Collect the convolution hyper-parameters (kernel/stride/padding) into the display map.
// NOTE(review): assumes `layer` is a ConvolutionLayer — presumably guarded by an instanceof check outside this view; confirm.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer; map.put("Kernel size", Arrays.toString(layer1.getKernelSize())); map.put("Stride", Arrays.toString(layer1.getStride())); map.put("Padding", Arrays.toString(layer1.getPadding()));
// Collect the convolution hyper-parameters (kernel/stride/padding) into the display map.
// NOTE(review): duplicated snippet — assumes `layer` is a ConvolutionLayer; cast is unchecked here.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer; map.put("Kernel size", Arrays.toString(layer1.getKernelSize())); map.put("Stride", Arrays.toString(layer1.getStride())); map.put("Padding", Arrays.toString(layer1.getPadding()));
// Collect the convolution hyper-parameters (kernel/stride/padding) into the display map.
// NOTE(review): duplicated snippet — assumes `layer` is a ConvolutionLayer; cast is unchecked here.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer; map.put("Kernel size", Arrays.toString(layer1.getKernelSize())); map.put("Stride", Arrays.toString(layer1.getStride())); map.put("Padding", Arrays.toString(layer1.getPadding()));
/**
 * Creates the parameter table (bias + weights) for a 2D convolution layer, backed by views into
 * the flat {@code paramsView} buffer so parameter updates are reflected in the flattened array.
 * Layout of {@code paramsView} row 0 is [bias (nOut) | weights (remainder)].
 *
 * @param conf             layer configuration; its layer must be a ConvolutionLayer with a 2-element kernel size
 * @param paramsView       flattened parameter buffer to slice views from
 * @param initializeParams whether to (re)initialize parameter values in the views
 * @return synchronized insertion-ordered map: bias entry first, then weights
 * @throws IllegalArgumentException if the configured kernel size is not 2-dimensional
 */
@Override public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    // Only 2D kernels are supported by this initializer.
    if (((org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer()).getKernelSize().length != 2) throw new IllegalArgumentException("Filter size must be == 2");
    Map<String, INDArray> params = Collections.synchronizedMap(new LinkedHashMap<String, INDArray>());
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
    // NOTE(review): `kernel` is unused in this method (weights are not reshaped here) — confirm before removing.
    int[] kernel = layerConf.getKernelSize();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();
    // Bias occupies the first nOut entries; weights occupy the rest up to numParams(conf).
    INDArray biasView = paramsView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, nOut));
    INDArray weightView = paramsView.get(NDArrayIndex.point(0), NDArrayIndex.interval(nOut, numParams(conf)));
    params.put(BIAS_KEY, createBias(conf, biasView, initializeParams));
    params.put(WEIGHT_KEY, createWeightMatrix(conf, weightView, initializeParams));
    conf.addVariable(WEIGHT_KEY);
    conf.addVariable(BIAS_KEY);
    return params;
}
// Render convolution hyper-parameters for display: a short main line (K/S/P), a sub-line with
// nIn/nOut, and a full HTML detail line. Assumes `layer.conf().getLayer()` is a ConvolutionLayer.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer.conf().getLayer(); mainLine.append("K: " + Arrays.toString(layer1.getKernelSize()) + " S: " + Arrays.toString(layer1.getStride()) + " P: " + Arrays.toString(layer1.getPadding())); subLine.append("nIn/nOut: [" + layer1.getNIn() + "/" + layer1.getNOut() + "]"); fullLine.append("Kernel size: ").append(Arrays.toString(layer1.getKernelSize())).append("<br/>"); fullLine.append("Stride: ").append(Arrays.toString(layer1.getStride())).append("<br/>"); fullLine.append("Padding: ").append(Arrays.toString(layer1.getPadding())).append("<br/>");
/**
 * Slices the flat gradient buffer into per-parameter gradient views for a convolution layer.
 * Row 0 of {@code gradientView} is laid out as [bias (nOut) | weights (remainder)], with the
 * weight portion reshaped (c-order) to [nOut, nIn, kernelRows, kernelCols].
 *
 * @param conf         layer configuration; its layer must be a ConvolutionLayer
 * @param gradientView flattened gradient buffer to slice views from
 * @return insertion-ordered map: bias gradient view first, then weight gradient view
 */
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer convConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
    int[] kernelDims = convConf.getKernelSize();
    int depthIn = convConf.getNIn();
    int depthOut = convConf.getNOut();
    // Bias gradients occupy the first depthOut entries of row 0.
    INDArray biasGradView = gradientView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, depthOut));
    // Weight gradients occupy the remainder, reshaped to the 4D weight layout.
    INDArray weightGradView = gradientView
            .get(NDArrayIndex.point(0), NDArrayIndex.interval(depthOut, numParams(conf)))
            .reshape('c', depthOut, depthIn, kernelDims[0], kernelDims[1]);
    Map<String, INDArray> views = new LinkedHashMap<>();
    views.put(BIAS_KEY, biasGradView);
    views.put(WEIGHT_KEY, weightGradView);
    return views;
}
// Render convolution hyper-parameters for display (duplicate of the sibling rendering snippet):
// short main line (K/S/P), sub-line with nIn/nOut, and full HTML detail line.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer.conf().getLayer(); mainLine.append("K: " + Arrays.toString(layer1.getKernelSize()) + " S: " + Arrays.toString(layer1.getStride()) + " P: " + Arrays.toString(layer1.getPadding())); subLine.append("nIn/nOut: [" + layer1.getNIn() + "/" + layer1.getNOut() + "]"); fullLine.append("Kernel size: ").append(Arrays.toString(layer1.getKernelSize())).append("<br/>"); fullLine.append("Stride: ").append(Arrays.toString(layer1.getStride())).append("<br/>"); fullLine.append("Padding: ").append(Arrays.toString(layer1.getPadding())).append("<br/>");
// For convolution layers, read kernel/stride/padding into outer-scope variables.
// NOTE(review): the if-block is not closed within this view; the enclosing method is not visible here.
if (layer instanceof ConvolutionLayer) { ConvolutionLayer cl = (ConvolutionLayer) layer; kernel = cl.getKernelSize(); stride = cl.getStride(); padding = cl.getPadding();
// For convolution layers, read kernel/stride/padding into outer-scope variables (duplicate snippet).
// NOTE(review): the if-block is not closed within this view; the enclosing method is not visible here.
if (layer instanceof ConvolutionLayer) { ConvolutionLayer cl = (ConvolutionLayer) layer; kernel = cl.getKernelSize(); stride = cl.getStride(); padding = cl.getPadding();
// For convolution layers, read kernel/stride/padding into outer-scope variables (duplicate snippet).
// NOTE(review): the if-block is not closed within this view; the enclosing method is not visible here.
if (layer instanceof ConvolutionLayer) { ConvolutionLayer cl = (ConvolutionLayer) layer; kernel = cl.getKernelSize(); stride = cl.getStride(); padding = cl.getPadding();
// Weight creation: when initializing, draw from the configured distribution; otherwise reshape
// the existing flat view into [nOut, nIn, kernelRows, kernelCols] (c-order) without touching values.
// NOTE(review): this fragment is garbled/incomplete as captured — the call that consumes `dist`,
// `kernel`, `stride` and `weightView` in the if-branch is missing, and the braces are unbalanced.
// Recover the original from version control before editing.
if (initializeParams) { Distribution dist = Distributions.createDistribution(layerConf.getDist()); int[] kernel = layerConf.getKernelSize(); int[] stride = layerConf.getStride(); weightView); } else { int[] kernel = layerConf.getKernelSize(); return WeightInitUtil.reshapeWeights( new int[] {layerConf.getNOut(), layerConf.getNIn(), kernel[0], kernel[1]}, weightView, 'c');
// Read kernel width from the weight array's last dimension, then fetch the configured
// kernel/stride; `pad` is declared for assignment below (outside this view).
// NOTE(review): enclosing method not visible here; `kW` vs `kernel[1]` consistency not verifiable from this fragment.
int kW = weights.size(3); int[] kernel = layerConf().getKernelSize(); int[] strides = layerConf().getStride(); int[] pad;
// Read kernel width from the weight array's last dimension, then fetch the configured
// kernel/stride (duplicate of the sibling setup snippet; enclosing method not visible here).
int kW = weights.size(3); int[] kernel = layerConf().getKernelSize(); int[] strides = layerConf().getStride();