@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    // Fold the two partial results into a fresh deep copy so that neither
    // input vector is mutated by the aggregation.
    CompoundVector accumulator = Serialization.deepCopy(value1);
    accumulator.sum(value2);
    return accumulator;
} }
@Override
public byte[] map(T value) throws Exception {
    // Convert the element into its serialized binary form for transport.
    final byte[] serialized = Serialization.serializeObject(value);
    return serialized;
} });
@Override
public T map(byte[] value) throws Exception {
    // Restore the element from its serialized binary form.
    final T restored = Serialization.deserializeObject(value);
    return restored;
} });
// NOTE(review): fragment of an open(Configuration) override — the method header and
// closing braces lie outside this view, so the braces below are unbalanced as shown.
super.open(parameters);
// Read the Bayesian-network name and the serialized SVB learner shipped
// through the Flink task configuration, then prepare the learner.
String bnName = parameters.getString(eu.amidst.flinklink.core.learning.parametric.ParallelVB.BN_NAME, "");
svb = Serialization.deserializeObject(parameters.getBytes(eu.amidst.flinklink.core.learning.parametric.ParallelVB.SVB, null));
svb.initLearning();
// Broadcast variable keyed by network name; its value is not read in this
// fragment — presumably updatedPrior below originates from it. TODO confirm.
Collection<CompoundVector> collection = getRuntimeContext().getBroadcastVariable("VB_PARAMS_" + bnName);
this.prior=Serialization.deepCopy(updatedPrior);
this.svb.updateNaturalParameterPrior(prior);
if (randomStart) {
    // Random restart: reseed and reset the variational q distributions, then
    // start from the randomized posterior shifted by the prior.
    this.svb.getPlateuStructure().setSeed(this.svb.getSeed());
    this.svb.getPlateuStructure().resetQs();
    initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
    initialPosterior.sum(prior);
}else{
    // Otherwise start from the learner's current natural-parameter prior.
    initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
    // Map the configured latent-interface variable names back to Variable objects.
    List<String> names = Serialization.deserializeObject(parameters.getBytes(LATENT_INTERFACE_VARIABLE_NAMES, null));
    latentInterfaceVariables = names.stream().map(name -> svb.getDAG().getVariables().getVariableByName(name)).collect(Collectors.toList());
// NOTE(review): fragment of an open(Configuration) override — the method header and
// closing braces lie outside this view, so the braces below are unbalanced as shown.
super.open(parameters);
bnName = parameters.getString(BN_NAME, "");
svb = Serialization.deserializeObject(parameters.getBytes(SVB, null));
// Install a copy of the updated posterior, scaled by `factor`, as the prior.
CompoundVector referencePrior = Serialization.deepCopy(updatedPosterior);
referencePrior.multiplyBy(factor);
svb.updateNaturalParameterPrior(referencePrior);
// NOTE(review): the scaled prior installed above is immediately replaced by an
// unscaled copy here — verify whether the multiplyBy(factor) step is meant to survive.
this.prior=Serialization.deepCopy(updatedPosterior);
this.svb.updateNaturalParameterPrior(prior);
if (randomStart) {
    // Random restart: reseed and reset the variational q distributions, then
    // start from the randomized posterior shifted by the prior.
    this.svb.getPlateuStructure().setSeed(this.svb.getSeed());
    this.svb.getPlateuStructure().resetQs();
    initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
    initialPosterior.sum(prior);
}else{
    // Otherwise start from the learner's current natural-parameter prior.
    initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    // Fold the two partial results into a fresh deep copy so that neither
    // input vector is mutated by the aggregation.
    CompoundVector accumulator = Serialization.deepCopy(value1);
    accumulator.sum(value2);
    return accumulator;
} }
// NOTE(review): fragment of an open(Configuration) override — the `if` matching the
// first `}else{` below, the method header, and the closing braces are outside this
// view, so the braces are unbalanced as shown.
super.open(parameters);
bnName = parameters.getString(BN_NAME, "");
svb = Serialization.deserializeObject(parameters.getBytes(SVB, null));
svb.initLearning();
// Baseline ELBO contribution of the non-replicated (global parameter) nodes.
basedELBO = svb.getPlateuStructure().getNonReplictedNodes().mapToDouble(node -> svb.getPlateuStructure().getVMP().computeELBO(node)).sum();
}else{
    // Re-install the previously updated prior on the learner.
    this.prior=Serialization.deepCopy(updatedPrior);
    this.svb.updateNaturalParameterPrior(prior);
    if (randomStart) {
        // Random restart: reseed and reset the variational q distributions, then
        // start from the randomized posterior shifted by the prior.
        this.svb.getPlateuStructure().setSeed(this.svb.getSeed());
        this.svb.getPlateuStructure().resetQs();
        initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
        initialPosterior.sum(prior);
    }else{
        // Otherwise start from the learner's current natural-parameter prior.
        initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
/**
 * Computes the posterior over the given latent variables for every instance of
 * the data set configured in this object, distributing inference with Flink.
 *
 * @param latentVariables the latent {@link Variable}s whose posterior is requested.
 * @return a {@link DataSet} holding one {@link DataPosterior} per data instance.
 * @throws IllegalArgumentException if the data set carries no seq_id attribute.
 */
public DataSet<DataPosterior> computePosterior(List<Variable> latentVariables){
    // A per-instance identifier is required to associate posteriors with instances.
    Attribute seqId = this.dataFlink.getAttributes().getSeq_id();
    if (seqId == null) {
        throw new IllegalArgumentException("Functionality only available for data sets with a seq_id attribute");
    }
    try {
        // Ship the network name, the serialized SVB learner and the latent
        // variables to every mapper through the Flink configuration.
        Configuration conf = new Configuration();
        conf.setString(ParameterLearningAlgorithm.BN_NAME, this.dag.getName());
        conf.setBytes(SVB, Serialization.serializeObject(svb));
        conf.setBytes(LATENT_VARS, Serialization.serializeObject(latentVariables));
        return this.dataFlink
                .getBatchedDataSet(this.batchSize)
                .flatMap(new ParallelVBMapInference())
                .withParameters(conf);
    } catch (Exception ex) {
        // Surface checked exceptions from Flink without widening the signature.
        throw new UndeclaredThrowableException(ex);
    }
}
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    // Rebuild the SVB learner that was shipped through the task configuration.
    final byte[] svbBytes = parameters.getBytes(SVB, null);
    svb = Serialization.deserializeObject(svbBytes);
    svb.initLearning();
    // Latent variables whose posteriors this mapper must compute.
    latentVariables = Serialization.deserializeObject(parameters.getBytes(LATENT_VARS, null));
} }
@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    // Fold the two partial results into a fresh deep copy so that neither
    // input vector is mutated by the aggregation.
    CompoundVector accumulator = Serialization.deepCopy(value1);
    accumulator.sum(value2);
    return accumulator;
} }
// NOTE(review): fragment of an open(Configuration) override inside a Flink bulk
// iteration — the method header and closing braces are outside this view, so the
// braces below are unbalanced as shown.
super.open(parameters);
bnName = parameters.getString(BN_NAME, "");
svb = Serialization.deserializeObject(parameters.getBytes(SVB, null));
// Flink superstep numbers are 1-based; shift to 0-based.
int superstep = getIterationRuntimeContext().getSuperstepNumber() - 1;
if (INITIALIZE && superstep==0) {
    // First superstep: record the baseline ELBO of the non-replicated (global) nodes.
    basedELBO = svb.getPlateuStructure().getNonReplictedNodes().mapToDouble(node -> svb.getPlateuStructure().getVMP().computeELBO(node)).sum();
}else{
    // Later supersteps: re-install the posterior aggregated so far as the new prior.
    this.prior=Serialization.deepCopy(updatedPosterior);
    this.svb.updateNaturalParameterPrior(prior);
    if (randomStart) {
        // Random restart: reseed and reset the variational q distributions, then
        // start from the randomized posterior shifted by the prior.
        this.svb.getPlateuStructure().setSeed(this.svb.getSeed());
        this.svb.getPlateuStructure().resetQs();
        initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
        initialPosterior.sum(prior);
    }else{
        // Otherwise start from the learner's current natural-parameter prior.
        initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
/**
 * Computes the posterior over the given latent variables for every instance of
 * the supplied data set, distributing inference with Flink.
 *
 * @param dataFlink the {@link DataFlink} data set to run inference over.
 * @param latentVariables the latent {@link Variable}s whose posterior is requested.
 * @return a {@link DataSet} holding one {@link DataPosterior} per data instance.
 * @throws IllegalArgumentException if the data set carries no seq_id attribute.
 */
public DataSet<DataPosterior> computePosterior(DataFlink<DataInstance> dataFlink, List<Variable> latentVariables){
    // A per-instance identifier is required to associate posteriors with instances.
    Attribute seqId = dataFlink.getAttributes().getSeq_id();
    if (seqId == null) {
        throw new IllegalArgumentException("Functionality only available for data sets with a seq_id attribute");
    }
    try {
        // Ship the network name, the serialized SVB learner and the latent
        // variables to every mapper through the Flink configuration.
        Configuration conf = new Configuration();
        conf.setString(ParameterLearningAlgorithm.BN_NAME, this.getName());
        conf.setBytes(SVB, Serialization.serializeObject(svb));
        conf.setBytes(LATENT_VARS, Serialization.serializeObject(latentVariables));
        return dataFlink
                .getBatchedDataSet(this.batchSize,batchConverter)
                .flatMap(new ParallelVBMapInference())
                .withParameters(conf);
    } catch (Exception ex) {
        // Surface checked exceptions from Flink without widening the signature.
        throw new UndeclaredThrowableException(ex);
    }
}
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    // Rebuild the SVB learner that was shipped through the task configuration.
    final byte[] svbBytes = parameters.getBytes(SVB, null);
    svb = Serialization.deserializeObject(svbBytes);
    svb.initLearning();
    // Latent variables whose posteriors this mapper must compute.
    latentVariables = Serialization.deserializeObject(parameters.getBytes(LATENT_VARS, null));
} }
/**
 * Creates a new FactoredFrontierForDBN object.
 *
 * @param inferenceAlgorithm the {@link InferenceAlgorithm} to use; time 0 keeps
 *                           the instance itself, later time steps use a deep copy.
 */
public FactoredFrontierForDBN(InferenceAlgorithm inferenceAlgorithm){
    this.infAlgTime0 = inferenceAlgorithm;
    // Independent copy so time-T inference shares no state with time 0.
    this.infAlgTimeT = Serialization.deepCopy(inferenceAlgorithm);
    this.timeID = -1;   // no time step has been processed yet
    this.setSeed(0);
}
// NOTE(review): fragment of an open(Configuration) override inside a Flink bulk
// iteration — the method header and closing braces are outside this view, so the
// braces below are unbalanced as shown.
super.open(parameters);
bnName = parameters.getString(BN_NAME, "");
svb = Serialization.deserializeObject(parameters.getBytes(SVB, null));
// Flink superstep numbers are 1-based; shift to 0-based.
int superstep = getIterationRuntimeContext().getSuperstepNumber() - 1;
if (INITIALIZE && superstep==0) {
    // First superstep: record the baseline ELBO of the non-replicated (global) nodes.
    basedELBO = svb.getPlateuStructure().getNonReplictedNodes().mapToDouble(node -> svb.getPlateuStructure().getVMP().computeELBO(node)).sum();
}else{
    // Later supersteps: re-install the posterior aggregated so far as the new prior.
    this.prior=Serialization.deepCopy(updatedPosterior);
    this.svb.updateNaturalParameterPrior(prior);
    if (randomStart) {
        // Random restart: reseed and reset the variational q distributions, then
        // start from the randomized posterior shifted by the prior.
        this.svb.getPlateuStructure().setSeed(this.svb.getSeed());
        this.svb.getPlateuStructure().resetQs();
        initialPosterior = Serialization.deepCopy(this.svb.getPlateuStructure().getPlateauNaturalParameterPosterior());
        initialPosterior.sum(prior);
    }else{
        // Otherwise start from the learner's current natural-parameter prior.
        initialPosterior=Serialization.deepCopy(svb.getNaturalParameterPrior());
/**
 * Computes the posterior over the given latent variables for every instance of
 * the supplied data set, distributing inference with Flink.
 *
 * @param dataFlink the {@link DataFlink} data set to run inference over.
 * @param latentVariables the latent {@link Variable}s whose posterior is requested.
 * @return a {@link DataSet} holding one {@link DataPosterior} per data instance.
 * @throws IllegalArgumentException if the data set carries no seq_id attribute.
 */
public DataSet<DataPosterior> computePosterior(DataFlink<DataInstance> dataFlink, List<Variable> latentVariables){
    // A per-instance identifier is required to associate posteriors with instances.
    Attribute seqId = dataFlink.getAttributes().getSeq_id();
    if (seqId == null) {
        throw new IllegalArgumentException("Functionality only available for data sets with a seq_id attribute");
    }
    try {
        // Ship the network name, the serialized SVB learner and the latent
        // variables to every mapper through the Flink configuration.
        Configuration conf = new Configuration();
        conf.setString(ParameterLearningAlgorithm.BN_NAME, this.dag.getName());
        conf.setBytes(SVB, Serialization.serializeObject(svb));
        conf.setBytes(LATENT_VARS, Serialization.serializeObject(latentVariables));
        return dataFlink
                .getBatchedDataSet(this.batchSize)
                .flatMap(new ParallelVBMapInference())
                .withParameters(conf);
    } catch (Exception ex) {
        // Surface checked exceptions from Flink without widening the signature.
        throw new UndeclaredThrowableException(ex);
    }
}
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    // Rebuild the SVB learner that was shipped through the task configuration.
    final byte[] svbBytes = parameters.getBytes(SVB, null);
    svb = Serialization.deserializeObject(svbBytes);
    svb.initLearning();
    // Latent variables whose posteriors this mapper must compute.
    latentVariables = Serialization.deserializeObject(parameters.getBytes(LATENT_VARS, null));
} }
@Override
public CompoundVector reduce(CompoundVector value1, CompoundVector value2) throws Exception {
    // Fold the two partial results into a fresh deep copy so that neither
    // input vector is mutated by the aggregation.
    CompoundVector accumulator = Serialization.deepCopy(value1);
    accumulator.sum(value2);
    return accumulator;
} }
/**
 * Computes, for every instance of the supplied data set, the posterior assignment
 * over the given latent variables, distributing inference with Flink.
 *
 * @param dataFlink the {@link DataFlink} data set to run inference over.
 * @param latentVariables the latent {@link Variable}s whose assignment is requested.
 * @return a {@link DataSet} holding one {@link DataPosteriorAssignment} per data instance.
 * @throws IllegalArgumentException if the data set carries no seq_id attribute.
 */
public DataSet<DataPosteriorAssignment> computePosteriorAssignment(DataFlink<DataInstance> dataFlink, List<Variable> latentVariables){
    // A per-instance identifier is required to associate assignments with instances.
    Attribute seqId = dataFlink.getAttributes().getSeq_id();
    if (seqId == null) {
        throw new IllegalArgumentException("Functionality only available for data sets with a seq_id attribute");
    }
    try {
        // Ship the network name, the serialized SVB learner and the latent
        // variables to every mapper through the Flink configuration.
        Configuration conf = new Configuration();
        conf.setString(ParameterLearningAlgorithm.BN_NAME, this.getName());
        conf.setBytes(SVB, Serialization.serializeObject(svb));
        conf.setBytes(LATENT_VARS, Serialization.serializeObject(latentVariables));
        return dataFlink
                .getBatchedDataSet(this.batchSize,batchConverter)
                .flatMap(new ParallelVBMapInferenceAssignment())
                .withParameters(conf);
    } catch (Exception ex) {
        // Surface checked exceptions from Flink without widening the signature.
        throw new UndeclaredThrowableException(ex);
    }
}
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    // Rebuild the SVB learner that was shipped through the task configuration.
    final byte[] svbBytes = parameters.getBytes(SVB, null);
    svb = Serialization.deserializeObject(svbBytes);
    svb.initLearning();
    // Latent variables whose posteriors this mapper must compute.
    latentVariables = Serialization.deserializeObject(parameters.getBytes(LATENT_VARS, null));
} }