input) .addLayer("stem-batch1", new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(32).nOut(32) .build(), "stem-cnn1") "stem-batch1") .addLayer("stem-batch2", new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(32).nOut(32) .build(), "stem-cnn2") .cudnnAlgoMode(ConvolutionLayer.AlgoMode.NO_WORKSPACE).build(), "stem-batch2") .addLayer("stem-batch3", new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(64) .nOut(64).build(), "stem-cnn3") "stem-pool4") .addLayer("stem-batch5", new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(80).nOut(80) .build(), "stem-cnn5") "stem-batch5") .addLayer("stem-batch6", new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(128).nOut(128) .build(), "stem-cnn6")
previousBlock) .addLayer(nameLayer(blockName, "batch1", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(32) .nOut(32).build(), nameLayer(blockName, "cnn1", i)) previousBlock) .addLayer(nameLayer(blockName, "batch2", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(32) .nOut(32).build(), nameLayer(blockName, "cnn2", i)) nameLayer(blockName, "batch2", i)) .addLayer(nameLayer(blockName, "batch3", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(32) .nOut(32).build(), nameLayer(blockName, "cnn3", i)) previousBlock) .addLayer(nameLayer(blockName, "batch4", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(32) .nOut(32).build(), nameLayer(blockName, "cnn4", i)) nameLayer(blockName, "batch4", i)) .addLayer(nameLayer(blockName, "batch5", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(32) .nOut(32).build(), nameLayer(blockName, "cnn5", i))
previousBlock) .addLayer(nameLayer(blockName, "batch1", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(128) .nOut(128).build(), nameLayer(blockName, "cnn1", i)) previousBlock) .addLayer(nameLayer(blockName, "batch2", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(128) .nOut(128).build(), nameLayer(blockName, "cnn2", i)) nameLayer(blockName, "batch2", i)) .addLayer(nameLayer(blockName, "batch3", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(128) .nOut(128).build(), nameLayer(blockName, "cnn3", i)) nameLayer(blockName, "batch3", i)) .addLayer(nameLayer(blockName, "batch4", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(128) .nOut(128).build(), nameLayer(blockName, "cnn4", i)) nameLayer(blockName, "merge1", i)) .addLayer(nameLayer(blockName, "batch5", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(576) .nOut(576).build(), nameLayer(blockName, "cnn5", i))
previousBlock) .addLayer(nameLayer(blockName, "batch1", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(192) .nOut(192).build(), nameLayer(blockName, "cnn1", i)) previousBlock) .addLayer(nameLayer(blockName, "batch2", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(192) .nOut(192).build(), nameLayer(blockName, "cnn2", i)) nameLayer(blockName, "batch2", i)) .addLayer(nameLayer(blockName, "batch3", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001).nIn(192) .nOut(192).build(), nameLayer(blockName, "cnn3", i)) nameLayer(blockName, "batch3", i)) .addLayer(nameLayer(blockName, "batch4", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001) .activation(Activation.TANH).nIn(192).nOut(192).build(), nameLayer(blockName, "cnn4", i)) nameLayer(blockName, "merge1", i)) .addLayer(nameLayer(blockName, "batch5", i), new BatchNormalization.Builder(false).decay(0.995).eps(0.001) .activation(Activation.TANH).nIn(1344).nOut(1344).build(), nameLayer(blockName, "cnn5", i))
/**
 * Constructor from a parsed Keras layer configuration dictionary.
 *
 * <p>Builds the equivalent DL4J {@code BatchNormalization} layer, carrying over the
 * layer name, dropout, epsilon, and momentum from the Keras configuration.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException if the Keras configuration is malformed
 * @throws UnsupportedKerasConfigurationException if the configuration requests options DL4J cannot map
 */
public KerasBatchNormalization(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    // NOTE(review): the calls below look like pure validation — presumably they throw
    // for unsupported regularizer / mode / axis settings; their return values were
    // previously stored in locals that were never read, so the dead locals are removed
    // and only the (side-effecting) calls are kept. Confirm against the getter impls.
    getGammaRegularizerFromConfig(layerConfig, enforceTrainingConfig);
    getBetaRegularizerFromConfig(layerConfig, enforceTrainingConfig);
    getBatchNormMode(layerConfig, enforceTrainingConfig);
    getBatchNormAxis(layerConfig, enforceTrainingConfig);
    this.layer = new BatchNormalization.Builder().name(this.layerName).dropOut(this.dropout).minibatch(true)
                    .lockGammaBeta(false).eps(getEpsFromConfig(layerConfig))
                    .momentum(getMomentumFromConfig(layerConfig)).build();
}
/**
 * Transfers the hyperparameter values sampled for this layer space onto the given
 * {@code BatchNormalization.Builder}.
 *
 * <p>Shared options are applied by the superclass first; each batch-norm-specific
 * parameter space is optional and is only applied when it was configured (non-null).
 *
 * @param builder builder to populate with the sampled values
 * @param values  flattened parameter vector the parameter spaces sample from
 */
protected void setLayerOptionsBuilder(BatchNormalization.Builder builder, double[] values) {
    // Options common to all layer types come from the parent class.
    super.setLayerOptionsBuilder(builder, values);

    if (decay != null) {
        builder.decay(decay.getValue(values));
    }
    if (eps != null) {
        builder.eps(eps.getValue(values));
    }
    if (isMinibatch != null) {
        builder.minibatch(isMinibatch.getValue(values));
    }
    if (lockGammaBeta != null) {
        builder.lockGammaBeta(lockGammaBeta.getValue(values));
    }
    if (gamma != null) {
        builder.gamma(gamma.getValue(values));
    }
    if (beta != null) {
        builder.beta(beta.getValue(values));
    }
}