/**
 * Combiner for the distributed aggregation: merges two partial
 * {@link CompoundVector} aggregates into one.
 *
 * <p>A deep copy of {@code value1} is taken before summing so that neither
 * input is mutated in place — presumably because the execution framework may
 * reuse input objects across reduce invocations (the previously commented-out
 * mutating variant was removed as dead code; confirm the object-reuse
 * assumption against the framework's reduce contract).
 *
 * @param value1 first partial aggregate (left unmodified)
 * @param value2 second partial aggregate (left unmodified)
 * @return a fresh vector holding the element-wise sum of both inputs
 * @throws Exception if the serialization round-trip in deepCopy fails
 */
@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    CompoundVector newValue = Serialization.deepCopy(value1);
    newValue.sum(value2);
    return newValue;
}
} // closes the enclosing (anonymous) class — its header is outside this view
/**
 * Collects the first moment parameter of each truncated-exponential
 * posterior q, one entry per base vector of the prior.
 *
 * @return array of moment parameters, indexed like the prior's base vectors
 */
public double[] getLambdaMomentParameters(){
    final int size = this.prior.getNumberOfBaseVectors();
    double[] moments = new double[size];
    for (int idx = 0; idx < size; idx++) {
        moments[idx] = this.ef_TExpQ[idx].getMomentParameters().get(0);
    }
    return moments;
}
// NOTE(review): this span appears to be a concatenation of NON-CONTIGUOUS
// fragments — the second `for` re-declares `i` inside the first one's scope,
// which cannot compile as contiguous code. Left byte-identical; confirm
// against the original file before editing.
for (int i = 0; i < prior.getNumberOfBaseVectors(); i++) {
    // Per-base-vector truncated-exponential q, sharing p's upper interval.
    this.ef_TExpQ[i] = truncatedExpVar.getDistributionType().newEFUnivariateDistribution(this.getDelta());
    this.ef_TExpQ[i].setUpperInterval(this.ef_TExpP.getUpperInterval());
// --- second fragment: read lambda moments and blend prior/posterior ---
double[] lambda = new double[prior.getNumberOfBaseVectors()];
for (int i = 0; i < lambda.length; i++) {
    lambda[i] = this.ef_TExpQ[i].getMomentParameters().get(0);
    // Convex mix with weight lambda[i]: prior scaled by (1-lambda), posterior by lambda.
    newPrior.getVectorByPosition(i).multiplyBy(1 - lambda[i]);
    newPosterior.getVectorByPosition(i).multiplyBy(lambda[i]);
// --- third fragment: publish the blended prior and set up KL accumulators ---
newPrior.sum(newPosterior);
this.plateuStructure.updateNaturalParameterPrior(newPrior);
double[] kl_q_p0 = new double[this.prior.getNumberOfBaseVectors()];
int count = 0;
double[] kl_q_pt_1 = new double[this.prior.getNumberOfBaseVectors()];
count = 0;
// NOTE(review): fragment — begins mid-stream-pipeline and contains unmatched
// `}else{` tokens; the enclosing method is not visible in this view.
// (`idenitifableModelling` looks misspelled but presumably matches the field
// declaration elsewhere — do not rename here.)
this.idenitifableModelling.isActiveAtEpoch(node.getMainVariable(), superstep-1))
    // Copy only the positions active at this epoch into the updated posterior.
    .forEach(node -> updatedPosterior.setVectorByPosition(count[0] - 1,
        newVector.getVectorByPosition(count[0]-1)) );
}else{
    // Reset the variational q distributions and seed the posterior from the
    // current plateau posterior plus the prior natural parameters.
    this.svb.getPlateuStructure().resetQs();
    initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
    initialPosterior.sum(prior);
}else{
    initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
// NOTE(review): fragment — brace structure is not balanced in this view;
// the closing braces of both loops lie outside the excerpt.
for (int i = 0; i < lambda.length; i++) {
    for (int j = 0; j < lambda[i].length; j++) {
        // Element-wise convex mix per (base vector, component):
        // prior scaled by (1 - lambda[i][j]), posterior by lambda[i][j].
        newPrior.getVectorByPosition(i).set(j,newPrior.getVectorByPosition(i).get(j)*(1-lambda[i][j]));
        newPosterior.getVectorByPosition(i).set(j,newPosterior.getVectorByPosition(i).get(j)*(lambda[i][j]));
// Publish the blended prior to the plateau structure.
newPrior.sum(newPosterior);
this.plateuStructure.updateNaturalParameterPrior(newPrior);
// NOTE(review): fragment — `else` appears without its opening `if`, and a
// second `}else{` is unmatched; the enclosing method is not visible here.
updatedPosterior = collection.iterator().next();
else{
    // Convex combination of the previous posterior and the newly reduced
    // update, weighted by learningRate.
    updatedPosterior.multiplyBy(1-learningRate);
    CompoundVector update= collection.iterator().next();
    update.multiplyBy(learningRate);
    updatedPosterior.sum(update);
    // Reset q distributions and seed the posterior from the current plateau
    // posterior plus the prior.
    this.svb.getPlateuStructure().resetQs();
    initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
    initialPosterior.sum(prior);
}else{
    initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
// NOTE(review): fragment of a larger method. Removes this batch's stale
// partial contribution from the accumulated vector, then publishes the
// result as the new prior. The posterior update reads `updatedPrior`, which
// is not defined in this excerpt — verify against the full method.
newVector.substract(partialVectors.get(batchID));
this.svb.updateNaturalParameterPrior(newVector);
this.svb.updateNaturalParameterPosteriors(updatedPrior);
// NOTE(review): fragment — the `}else{` near the end is unmatched in this
// view; the enclosing conditional is not visible.
// Scale the reference prior and install it before resetting the q's.
referencePrior.multiplyBy(factor);
svb.updateNaturalParameterPrior(referencePrior);
this.svb.getPlateuStructure().resetQs();
// Seed the posterior from the current plateau posterior plus the prior.
initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
initialPosterior.sum(prior);
}else{
    initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
// NOTE(review): fragment of a larger method (parallel variant of the batch
// subtraction seen elsewhere in this file, keyed by dataBatch.getBatchID()).
// `updatedPrior` is not defined in this excerpt — verify against the full
// method.
newVector.substract(partialVectors.get(dataBatch.getBatchID()));
this.svb.updateNaturalParameterPrior(newVector);
this.svb.updateNaturalParameterPosteriors(updatedPrior);
/**
 * Combiner for the distributed aggregation: merges two partial
 * {@link CompoundVector} aggregates into one.
 *
 * <p>A deep copy of {@code value1} is taken before summing so that neither
 * input is mutated in place — presumably because the execution framework may
 * reuse input objects across reduce invocations (the previously commented-out
 * mutating variant was removed as dead code; confirm the object-reuse
 * assumption against the framework's reduce contract).
 *
 * @param value1 first partial aggregate (left unmodified)
 * @param value2 second partial aggregate (left unmodified)
 * @return a fresh vector holding the element-wise sum of both inputs
 * @throws Exception if the serialization round-trip in deepCopy fails
 */
@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    CompoundVector newValue = Serialization.deepCopy(value1);
    newValue.sum(value2);
    return newValue;
}
} // closes the enclosing (anonymous) class — its header is outside this view
/**
 * Collects the first natural parameter of each truncated-exponential
 * posterior q, one entry per base vector of the prior.
 *
 * @return array of natural parameters, indexed like the prior's base vectors
 */
public double[] getLambdaNaturalParameters(){
    final int size = this.prior.getNumberOfBaseVectors();
    double[] naturals = new double[size];
    for (int idx = 0; idx < size; idx++) {
        naturals[idx] = this.ef_TExpQ[idx].getNaturalParameters().get(0);
    }
    return naturals;
}
/**
 * Combiner for the distributed aggregation: merges two partial
 * {@link CompoundVector} aggregates into one.
 *
 * <p>A deep copy of {@code value1} is taken before summing so that neither
 * input is mutated in place — presumably because the execution framework may
 * reuse input objects across reduce invocations (the previously commented-out
 * mutating variant was removed as dead code; confirm the object-reuse
 * assumption against the framework's reduce contract).
 *
 * @param value1 first partial aggregate (left unmodified)
 * @param value2 second partial aggregate (left unmodified)
 * @return a fresh vector holding the element-wise sum of both inputs
 * @throws Exception if the serialization round-trip in deepCopy fails
 */
@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    CompoundVector newValue = Serialization.deepCopy(value1);
    newValue.sum(value2);
    return newValue;
}
} // closes the enclosing (anonymous) class — its header is outside this view
/**
 * {@inheritDoc}
 *
 * <p>Additionally sets up the truncated-exponential machinery: one shared
 * p distribution and one q distribution per base vector of the prior.
 */
@Override
public void initLearning() {
    super.initLearning();

    // Auxiliary variable and the single p distribution, both parameterized
    // by the current delta.
    truncatedExpVar = new Variables().newTruncatedExponential("TruncatedExponentialVar");
    this.ef_TExpP = truncatedExpVar.getDistributionType().newEFUnivariateDistribution(this.getDelta());

    prior = this.plateuStructure.getPlateauNaturalParameterPrior();

    // One independent q distribution per base vector of the prior.
    final int nBaseVectors = prior.getNumberOfBaseVectors();
    this.ef_TExpQ = new EF_TruncatedExponential[nBaseVectors];
    for (int k = 0; k < nBaseVectors; k++) {
        this.ef_TExpQ[k] = truncatedExpVar.getDistributionType().newEFUnivariateDistribution(this.getDelta());
    }

    firstBatch = true;
}
/**
 * Combiner for the distributed aggregation: merges two partial
 * {@link CompoundVector} aggregates into one.
 *
 * <p>A deep copy of {@code value1} is taken before summing so that neither
 * input is mutated in place — presumably because the execution framework may
 * reuse input objects across reduce invocations (the previously commented-out
 * mutating variant was removed as dead code; confirm the object-reuse
 * assumption against the framework's reduce contract).
 *
 * @param value1 first partial aggregate (left unmodified)
 * @param value2 second partial aggregate (left unmodified)
 * @return a fresh vector holding the element-wise sum of both inputs
 * @throws Exception if the serialization round-trip in deepCopy fails
 */
@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    CompoundVector newValue = Serialization.deepCopy(value1);
    newValue.sum(value2);
    return newValue;
}
} // closes the enclosing (anonymous) class — its header is outside this view
/**
 * {@inheritDoc}
 *
 * <p>Additionally sets up the truncated-exponential machinery: one shared
 * p distribution and a 2-D grid of q distributions, one per
 * (base vector, state) pair.
 */
@Override
public void initLearning() {
    super.initLearning();

    // Auxiliary variable and the single p distribution, parameterized by delta.
    truncatedExpVar = new Variables().newTruncatedExponential("TruncatedExponentialVar");
    this.ef_TExpP = truncatedExpVar.getDistributionType().newEFUnivariateDistribution(this.getDelta());

    prior = this.plateuStructure.getPlateauNaturalParameterPrior();

    // Largest state count among the non-replicated variables.
    // NOTE(review): maxsize is a field whose initial value is not visible in
    // this excerpt; assumed small enough not to dominate the max — confirm at
    // its declaration.
    for (Variable variable : this.plateuStructure.getNonReplicatedVariables()) {
        maxsize = Math.max(maxsize, variable.getNumberOfStates());
    }

    // One q distribution per (base vector, state) cell.
    this.ef_TExpQ = new EF_TruncatedExponential[prior.getNumberOfBaseVectors()][maxsize];
    for (int row = 0; row < this.ef_TExpQ.length; row++) {
        for (int col = 0; col < this.ef_TExpQ[row].length; col++) {
            this.ef_TExpQ[row][col] = truncatedExpVar.getDistributionType().newEFUnivariateDistribution(this.getDelta());
        }
    }

    firstBatch = true;
}
// NOTE(review): fragment — the trailing `}else{` is unmatched in this view.
// Resets the variational q distributions, then seeds the posterior with the
// current plateau posterior plus the prior natural parameters; the else
// branch instead starts from a copy of the prior alone.
this.svb.getPlateuStructure().resetQs();
initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
initialPosterior.sum(prior);
}else{
    initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
// NOTE(review): fragment — the trailing `}else{` is unmatched in this view.
// Resets the variational q distributions, then seeds the posterior with the
// current plateau posterior plus the prior natural parameters; the else
// branch instead starts from a copy of the prior alone.
this.svb.getPlateuStructure().resetQs();
initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
initialPosterior.sum(prior);
}else{
    initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());