/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig           dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException     if the Keras configuration is malformed
 * @throws UnsupportedKerasConfigurationException if the configuration requests features this
 *                                                importer does not support
 */
public KerasBatchNormalization(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    // NOTE(review): the results of these four calls were previously stored in unused
    // locals; the calls are retained purely for their validating side effects — each
    // declares the checked config exceptions and presumably throws on unsupported
    // regularizers/modes/axes. Confirm against their implementations.
    getGammaRegularizerFromConfig(layerConfig, enforceTrainingConfig);
    getBetaRegularizerFromConfig(layerConfig, enforceTrainingConfig);
    getBatchNormMode(layerConfig, enforceTrainingConfig);
    getBatchNormAxis(layerConfig, enforceTrainingConfig);
    this.layer = new BatchNormalization.Builder()
            .name(this.layerName)
            .dropOut(this.dropout)
            .minibatch(true)        // always use minibatch statistics
            .lockGammaBeta(false)   // gamma/beta remain learnable
            .eps(getEpsFromConfig(layerConfig))
            .momentum(getMomentumFromConfig(layerConfig))
            .build();
}
/**
 * Applies this space's batch-normalization hyperparameters to the given layer builder.
 * Each hyperparameter field is optional; only the non-null ones are sampled (via
 * {@code getValue(values)}) and set on the builder. Setter order is irrelevant.
 *
 * @param builder layer builder to configure
 * @param values  parameter vector from which each hyperparameter space draws its value
 */
protected void setLayerOptionsBuilder(BatchNormalization.Builder builder, double[] values) {
    super.setLayerOptionsBuilder(builder, values);
    if (eps != null) {
        builder.eps(eps.getValue(values));
    }
    if (decay != null) {
        builder.decay(decay.getValue(values));
    }
    if (gamma != null) {
        builder.gamma(gamma.getValue(values));
    }
    if (beta != null) {
        builder.beta(beta.getValue(values));
    }
    if (isMinibatch != null) {
        builder.minibatch(isMinibatch.getValue(values));
    }
    if (lockGammaBeta != null) {
        builder.lockGammaBeta(lockGammaBeta.getValue(values));
    }
}