/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * <p>Builds a DL4J {@link LocalResponseNormalization} layer from the inner Keras
 * config. Numeric fields are read through {@link Number} rather than cast directly,
 * because JSON parsing may yield {@code Integer} or {@code Double} interchangeably
 * (e.g. {@code "alpha": 1}); a direct {@code (double)} cast would throw
 * {@code ClassCastException} in that case.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException if a required LRN field (alpha, beta, k, n)
 *         is missing or non-numeric
 * @throws UnsupportedKerasConfigurationException if the configuration is unsupported
 */
public KerasLRN(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    Map<String, Object> lrnParams = getInnerLayerConfigFromConfig(layerConfig);
    LocalResponseNormalization.Builder builder = new LocalResponseNormalization.Builder()
            .name(this.layerName)
            .dropOut(this.dropout)
            .alpha(requiredDouble(lrnParams, "alpha"))
            .beta(requiredDouble(lrnParams, "beta"))
            .k(requiredInt(lrnParams, "k"))
            .n(requiredInt(lrnParams, "n"));
    this.layer = builder.build();
    this.vertex = null;
}

/** Reads a required numeric config field as a double, tolerating integer-valued JSON numbers. */
private static double requiredDouble(Map<String, Object> params, String key)
        throws InvalidKerasConfigurationException {
    Object value = params.get(key);
    if (!(value instanceof Number))
        throw new InvalidKerasConfigurationException(
                "Keras LRN layer config missing or non-numeric field \"" + key + "\"");
    return ((Number) value).doubleValue();
}

/** Reads a required numeric config field as an int, tolerating double-valued JSON numbers. */
private static int requiredInt(Map<String, Object> params, String key)
        throws InvalidKerasConfigurationException {
    Object value = params.get(key);
    if (!(value instanceof Number))
        throw new InvalidKerasConfigurationException(
                "Keras LRN layer config missing or non-numeric field \"" + key + "\"");
    return ((Number) value).intValue();
}
.activation(Activation.RELU) .build()) .layer(1, new LocalResponseNormalization.Builder().name("lrn1").build()) .layer(new SubsamplingLayer.Builder(PoolingType.MAX) .kernelSize(3,3) .activation(Activation.RELU) .build()) .layer(1, new LocalResponseNormalization.Builder().name("lrn2").build()) .layer(new SubsamplingLayer.Builder(PoolingType.MAX) .kernelSize(3,3)
.stride(2, 2) .build()) .layer(2, new LocalResponseNormalization.Builder().build()) .layer(3, new ConvolutionLayer.Builder(5, 5) .nOut(64) .stride(2, 2) .build()) .layer(5, new LocalResponseNormalization.Builder().build()) .layer(6, new DenseLayer.Builder().nOut(1024).dropOut(dropOut).activation(Activation.RELU).build()) .layer(7, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
new int[] {2, 2}, new int[] {1, 1}).build(), "stem-activation1") .addLayer("stem-lrn1", new LocalResponseNormalization.Builder(1, 5, 1e-4, 0.75).build(), "stem-pool1") .addLayer("inception-2-lrn1", new LocalResponseNormalization.Builder(1, 5, 1e-4, 0.75).build(), "inception-2-activation2") .addLayer("inception-2-pool1",
.layer(1, new LocalResponseNormalization.Builder().name("lrn1").build()) .layer(2, maxPool("maxpool1", new int[]{3,3})) .layer(3, conv5x5("cnn2", 256, new int[] {1,1}, new int[] {2,2}, nonZeroBias)) .layer(4, new LocalResponseNormalization.Builder().name("lrn2").build()) .layer(5, maxPool("maxpool2", new int[]{3,3})) .layer(6,conv3x3("cnn3", 384, 0))
new int[] {0, 0}).build(), "cnn1") .addLayer("lrn1", new LocalResponseNormalization.Builder(5, 1e-4, 0.75).build(), "max1") .addLayer("cnn2", conv1x1(64, 64, 0.2), "lrn1").addLayer("cnn3", conv3x3(64, 192, 0.2), "cnn2") .addLayer("lrn2", new LocalResponseNormalization.Builder(5, 1e-4, 0.75).build(), "cnn3") .addLayer("max2", new SubsamplingLayer.Builder(new int[] {3, 3}, new int[] {2, 2}, new int[] {0, 0}).build(), "lrn2");
/**
 * Constructs a {@link LocalResponseNormalization} layer whose hyperparameters are
 * taken from the given candidate values.
 *
 * @param values hyperparameter values to apply via {@code setLayerOptionsBuilder}
 * @return the fully built layer configuration
 */
@Override
public LocalResponseNormalization getValue(double[] values) {
    LocalResponseNormalization.Builder builder = new LocalResponseNormalization.Builder();
    setLayerOptionsBuilder(builder, values);
    return builder.build();
}