/**
 * Creates the weight matrix of shape [numUsers x nOut] backed by the given parameter view.
 *
 * @param conf                 layer configuration; its layer must be a {@link FeedForwardLayer}
 * @param weightParamView      flattened parameter view that backs the returned matrix
 * @param initializeParameters if true, fill the view from the layer's configured weight init/distribution;
 *                             if false, only reshape the existing view without touching its values
 * @return the (possibly initialized) weight matrix view
 */
protected INDArray createUserWeightMatrix(NeuralNetConfiguration conf, INDArray weightParamView, boolean initializeParameters) {
    FeedForwardLayer layerConf = (FeedForwardLayer) conf.getLayer();
    if (!initializeParameters) {
        // Reuse existing values: no weight-init scheme or distribution is needed.
        return createWeightMatrix(numUsers, layerConf.getNOut(), null, null, weightParamView, false);
    }
    Distribution dist = Distributions.createDistribution(layerConf.getDist());
    return createWeightMatrix(numUsers, layerConf.getNOut(), layerConf.getWeightInit(), dist, weightParamView, true);
}
.nIn(4096).nOut(numClasses) .weightInit(WeightInit.DISTRIBUTION) .dist(new NormalDistribution(0,0.2 * (2.0/(4096 + numClasses)))) //This weight init dist gave better results than Xavier .activation(Activation.SOFTMAX).build(), "fc2")
@Override public void initializeBackend() { // Constructions normal distribution with lower limit -1 and upper limit 1 backend = new org.deeplearning4j.nn.conf.distribution.UniformDistribution(-1.0, 1.0); } }
/**
 * Returns a backend binomial distribution built from this wrapper's current
 * number of trials and the backend's probability of success.
 */
@Override
public org.deeplearning4j.nn.conf.distribution.BinomialDistribution getBackend() {
    double p = backend.getProbabilityOfSuccess();
    return new org.deeplearning4j.nn.conf.distribution.BinomialDistribution(numberOfTrials, p);
}
}
@Override public void initializeBackend() { // Constructs binomial distribution with 1 trial and success probability 0.5 backend = new org.deeplearning4j.nn.conf.distribution.BinomialDistribution(numberOfTrials, 0.5); }
@Override public void initializeBackend() { // Constructions normal distribution with mean 0 and unit variance backend = new org.deeplearning4j.nn.conf.distribution.OrthogonalDistribution(1.0); } }
@Override
public void initializeBackend() {
    // Constructs a constant distribution that always yields 1.0.
    backend = new org.deeplearning4j.nn.conf.distribution.ConstantDistribution(1.0);
}
}
@Override public void initializeBackend() { // Constructions normal distribution with mean 0 and unit variance backend = new org.deeplearning4j.nn.conf.distribution.LogNormalDistribution(1e-3, 1.0); } }
@Override public void initializeBackend() { // Constructions normal distribution with mean 0 and unit variance backend = new org.deeplearning4j.nn.conf.distribution.TruncatedNormalDistribution(1e-3, 1.0); } }
/** Forwards the new constant value to the wrapped backend distribution. */
public void setValue(double value) {
    backend.setValue(value);
}

/**
/** Forwards the new standard deviation to the wrapped backend distribution. */
public void setStd(double std) {
    backend.setStd(std);
}
/**
 * Sets the upper bound of the wrapped backend distribution.
 *
 * @param upper the new upper bound
 */
public void setUpper(double upper) {
    // Parameter renamed from "std": this value is a bound, not a standard deviation.
    backend.setUpper(upper);
}
/**
 * Sets the lower bound of the wrapped backend distribution.
 *
 * @param lower the new lower bound
 */
public void setLower(double lower) {
    // Parameter renamed from "mean": this value is a bound, not a mean.
    backend.setLower(lower);
}
/** Forwards the new mean to the wrapped backend distribution. */
public void setMean(double mean) {
    backend.setMean(mean);
}
/** Updates the mean of the underlying backend distribution. */
public void setMean(double mean) {
    backend.setMean(mean);
}
/** Updates the standard deviation of the underlying backend distribution. */
public void setStd(double std) {
    backend.setStd(std);
}
/** Forwards the new success probability to the wrapped backend distribution. */
public void setProbabilityOfSuccess(double probabilityOfSuccess) {
    backend.setProbabilityOfSuccess(probabilityOfSuccess);
}
/** Delegates the new mean to the wrapped backend distribution. */
public void setMean(double mean) {
    backend.setMean(mean);
}
/** Delegates the new standard deviation to the wrapped backend distribution. */
public void setStd(double std) {
    backend.setStd(std);
}
@Override public void initializeBackend() { // Constructions normal distribution with mean 0 and unit variance backend = new org.deeplearning4j.nn.conf.distribution.NormalDistribution(1e-3, 1.0); } }