// NOTE(review): fragment with loop-body braces stripped during extraction; as
// written this is not valid Java (a local declaration cannot be the sole
// unbraced loop statement). Per iteration it appears to fetch the n-th weighted
// datum's value and snapshot its K current assignment weights into anold
// (presumably to measure assignment change later) — confirm against the
// original braced source.
for( int n = 0; n < N; n++ ) final Vector xn = this.weightedData.get(n).getValue(); double[] an = this.assignments.get(n); System.arraycopy(an, 0, anold, 0, K);
// NOTE(review): fragment with loop-body braces stripped during extraction; as
// written this is not valid Java (a local declaration cannot be the sole
// unbraced loop statement). Scalar (double) variant of the same pattern:
// per iteration, read the n-th weighted datum's value and copy its K current
// assignment weights into anold before they are updated — confirm against
// the original braced source.
for( int n = 0; n < N; n++ ) final double xn = this.weightedData.get(n).getValue(); double[] an = this.assignments.get(n); System.arraycopy(an, 0, anold, 0, K);
/**
 * Creates a copy of this WeightedValue. The weight is copied, and the
 * value is smart-cloned via {@code ObjectUtil.cloneSmart}, so cloneable
 * values are copied while non-cloneable values are shared with the
 * original. (The previous wording claimed the value was not copied,
 * which contradicted the {@code cloneSmart} call below.)
 *
 * @return A new copy of this {@code WeightedValue}.
 */
@Override
public DefaultWeightedValue<ValueType> clone()
{
    @SuppressWarnings("unchecked")
    final DefaultWeightedValue<ValueType> clone =
        (DefaultWeightedValue<ValueType>) super.clone();
    // Smart-clone the value so the copy does not alias a mutable original.
    clone.setValue(ObjectUtil.cloneSmart(this.getValue()));
    return clone;
}
/**
 * Creates a copy of this WeightedValue. The weight is copied, and the
 * value is smart-cloned via {@code ObjectUtil.cloneSmart}, so cloneable
 * values are copied while non-cloneable values are shared with the
 * original. (The previous wording claimed the value was not copied,
 * which contradicted the {@code cloneSmart} call below.)
 *
 * @return A new copy of this {@code WeightedValue}.
 */
@Override
public DefaultWeightedValue<ValueType> clone()
{
    @SuppressWarnings("unchecked")
    final DefaultWeightedValue<ValueType> clone =
        (DefaultWeightedValue<ValueType>) super.clone();
    // Smart-clone the value so the copy does not alias a mutable original.
    clone.setValue(ObjectUtil.cloneSmart(this.getValue()));
    return clone;
}
/**
 * Creates a copy of this WeightedValue. The weight is copied, and the
 * value is smart-cloned via {@code ObjectUtil.cloneSmart}, so cloneable
 * values are copied while non-cloneable values are shared with the
 * original. (The previous wording claimed the value was not copied,
 * which contradicted the {@code cloneSmart} call below.)
 *
 * @return A new copy of this {@code WeightedValue}.
 */
@Override
public DefaultWeightedValue<ValueType> clone()
{
    @SuppressWarnings("unchecked")
    final DefaultWeightedValue<ValueType> clone =
        (DefaultWeightedValue<ValueType>) super.clone();
    // Smart-clone the value so the copy does not alias a mutable original.
    clone.setValue(ObjectUtil.cloneSmart(this.getValue()));
    return clone;
}
// Fragment: converts the log-likelihood stored in wv's value into a weight.
// 1.0/Math.exp(ll - max) == Math.exp(max - ll) >= 1, so the maximum-likelihood
// sample gets weight 1.0 and LOWER-likelihood samples get LARGER weights.
// NOTE(review): if ordinary likelihood-proportional weights were intended this
// would be Math.exp(logLikelihood - maxLogLikelihood) without the reciprocal —
// confirm the inversion is deliberate in the enclosing algorithm.
final double logLikelihood = wv.getValue(); final double weight = 1.0/Math.exp(logLikelihood - maxLogLikelihood); wv.setWeight(weight);
// Fragment: converts the log-likelihood stored in wv's value into a weight.
// 1.0/Math.exp(ll - max) == Math.exp(max - ll) >= 1, so the maximum-likelihood
// sample gets weight 1.0 and LOWER-likelihood samples get LARGER weights.
// NOTE(review): if ordinary likelihood-proportional weights were intended this
// would be Math.exp(logLikelihood - maxLogLikelihood) without the reciprocal —
// confirm the inversion is deliberate in the enclosing algorithm.
final double logLikelihood = wv.getValue(); final double weight = 1.0/Math.exp(logLikelihood - maxLogLikelihood); wv.setWeight(weight);
// Fragment: converts the log-likelihood stored in wv's value into a weight.
// 1.0/Math.exp(ll - max) == Math.exp(max - ll) >= 1, so the maximum-likelihood
// sample gets weight 1.0 and LOWER-likelihood samples get LARGER weights.
// NOTE(review): if ordinary likelihood-proportional weights were intended this
// would be Math.exp(logLikelihood - maxLogLikelihood) without the reciprocal —
// confirm the inversion is deliberate in the enclosing algorithm.
final double logLikelihood = wv.getValue(); final double weight = 1.0/Math.exp(logLikelihood - maxLogLikelihood); wv.setWeight(weight);
/**
 * Computes the squared 2-norm of the weight vector implied by the given
 * kernel binary categorizer. Useful for bypassing the square-root
 * computation in the 2-norm computation.
 *
 * @param <InputType>
 *      The type of input to the categorizer value.
 * @param target
 *      A kernel binary categorizer.
 * @return
 *      The 2-norm of the categorizer according to the kernel.
 */
public static <InputType> double norm2Squared(
    final DefaultKernelBinaryCategorizer<InputType> target)
{
    final double bias = target.getBias();
    double total = 0.0;

    // Accumulate weight_i * (f(x_i) - bias) over every stored example,
    // which equals the squared norm of the implied weight vector.
    for (final DefaultWeightedValue<InputType> entry : target.getExamples())
    {
        final double margin = target.evaluateAsDouble(entry.getValue()) - bias;
        total += entry.getWeight() * margin;
    }

    return total;
}
/**
 * Computes the squared 2-norm of the weight vector implied by the given
 * kernel binary categorizer. Useful for bypassing the square-root
 * computation in the 2-norm computation.
 *
 * @param <InputType>
 *      The type of input to the categorizer value.
 * @param target
 *      A kernel binary categorizer.
 * @return
 *      The 2-norm of the categorizer according to the kernel.
 */
public static <InputType> double norm2Squared(
    final DefaultKernelBinaryCategorizer<InputType> target)
{
    final double bias = target.getBias();
    double total = 0.0;

    // Accumulate weight_i * (f(x_i) - bias) over every stored example,
    // which equals the squared norm of the implied weight vector.
    for (final DefaultWeightedValue<InputType> entry : target.getExamples())
    {
        final double margin = target.evaluateAsDouble(entry.getValue()) - bias;
        total += entry.getWeight() * margin;
    }

    return total;
}
/**
 * Computes the squared 2-norm of the weight vector implied by the given
 * kernel binary categorizer. Useful for bypassing the square-root
 * computation in the 2-norm computation.
 *
 * @param <InputType>
 *      The type of input to the categorizer value.
 * @param target
 *      A kernel binary categorizer.
 * @return
 *      The 2-norm of the categorizer according to the kernel.
 */
public static <InputType> double norm2Squared(
    final DefaultKernelBinaryCategorizer<InputType> target)
{
    final double bias = target.getBias();
    double total = 0.0;

    // Accumulate weight_i * (f(x_i) - bias) over every stored example,
    // which equals the squared norm of the implied weight vector.
    for (final DefaultWeightedValue<InputType> entry : target.getExamples())
    {
        final double margin = target.evaluateAsDouble(entry.getValue()) - bias;
        total += entry.getWeight() * margin;
    }

    return total;
}
/**
 * Learns a distribution over parameters by importance sampling: draws
 * proposals from the updater's importance distribution and weights each
 * by its log-likelihood of the data minus its log importance value.
 *
 * @param data
 *      Observations used to score each proposed parameter.
 * @return
 *      A DataDistribution whose mass at each sampled parameter is
 *      proportional to its importance weight.
 */
@Override
public DataDistribution<ParameterType> learn(
    final Collection<? extends ObservationType> data)
{
    final int sampleCount = this.getNumSamples();
    final ArrayList<DefaultWeightedValue<ParameterType>> proposals =
        new ArrayList<DefaultWeightedValue<ParameterType>>(sampleCount);

    // Draw proposals, storing each with its log importance weight, and
    // track the largest log weight for stable exponentiation later.
    double largestLogWeight = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < sampleCount; i++)
    {
        final ParameterType parameter =
            this.getUpdater().makeProposal(random);
        final double logLikelihood =
            this.getUpdater().computeLogLikelihood(parameter, data);
        final double logImportance =
            this.getUpdater().computeLogImportanceValue(parameter);
        final double logWeight = logLikelihood - logImportance;
        if (logWeight > largestLogWeight)
        {
            largestLogWeight = logWeight;
        }
        proposals.add(new DefaultWeightedValue<ParameterType>(
            parameter, logWeight));
    }

    // Shift the log weights so exp() cannot overflow: the largest mass is
    // bounded by Double.MAX_VALUE / (2 * sampleCount).
    largestLogWeight -= Math.log(Double.MAX_VALUE / sampleCount / 2.0);

    final DataDistribution<ParameterType> result =
        new DefaultDataDistribution<ParameterType>(sampleCount);
    for (final DefaultWeightedValue<ParameterType> proposal : proposals)
    {
        result.increment(proposal.getValue(),
            Math.exp(proposal.getWeight() - largestLogWeight));
    }
    return result;
}
/**
 * Learns a distribution over parameters by importance sampling: draws
 * proposals from the updater's importance distribution and weights each
 * by its log-likelihood of the data minus its log importance value.
 *
 * @param data
 *      Observations used to score each proposed parameter.
 * @return
 *      A DataDistribution whose mass at each sampled parameter is
 *      proportional to its importance weight.
 */
@Override
public DataDistribution<ParameterType> learn(
    final Collection<? extends ObservationType> data)
{
    final int sampleCount = this.getNumSamples();
    final ArrayList<DefaultWeightedValue<ParameterType>> proposals =
        new ArrayList<DefaultWeightedValue<ParameterType>>(sampleCount);

    // Draw proposals, storing each with its log importance weight, and
    // track the largest log weight for stable exponentiation later.
    double largestLogWeight = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < sampleCount; i++)
    {
        final ParameterType parameter =
            this.getUpdater().makeProposal(random);
        final double logLikelihood =
            this.getUpdater().computeLogLikelihood(parameter, data);
        final double logImportance =
            this.getUpdater().computeLogImportanceValue(parameter);
        final double logWeight = logLikelihood - logImportance;
        if (logWeight > largestLogWeight)
        {
            largestLogWeight = logWeight;
        }
        proposals.add(new DefaultWeightedValue<ParameterType>(
            parameter, logWeight));
    }

    // Shift the log weights so exp() cannot overflow: the largest mass is
    // bounded by Double.MAX_VALUE / (2 * sampleCount).
    largestLogWeight -= Math.log(Double.MAX_VALUE / sampleCount / 2.0);

    final DataDistribution<ParameterType> result =
        new DefaultDataDistribution<ParameterType>(sampleCount);
    for (final DefaultWeightedValue<ParameterType> proposal : proposals)
    {
        result.increment(proposal.getValue(),
            Math.exp(proposal.getWeight() - largestLogWeight));
    }
    return result;
}
/**
 * Learns a distribution over parameters by importance sampling: draws
 * proposals from the updater's importance distribution and weights each
 * by its log-likelihood of the data minus its log importance value.
 *
 * @param data
 *      Observations used to score each proposed parameter.
 * @return
 *      A DataDistribution whose mass at each sampled parameter is
 *      proportional to its importance weight.
 */
@Override
public DataDistribution<ParameterType> learn(
    final Collection<? extends ObservationType> data)
{
    final int sampleCount = this.getNumSamples();
    final ArrayList<DefaultWeightedValue<ParameterType>> proposals =
        new ArrayList<DefaultWeightedValue<ParameterType>>(sampleCount);

    // Draw proposals, storing each with its log importance weight, and
    // track the largest log weight for stable exponentiation later.
    double largestLogWeight = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < sampleCount; i++)
    {
        final ParameterType parameter =
            this.getUpdater().makeProposal(random);
        final double logLikelihood =
            this.getUpdater().computeLogLikelihood(parameter, data);
        final double logImportance =
            this.getUpdater().computeLogImportanceValue(parameter);
        final double logWeight = logLikelihood - logImportance;
        if (logWeight > largestLogWeight)
        {
            largestLogWeight = logWeight;
        }
        proposals.add(new DefaultWeightedValue<ParameterType>(
            parameter, logWeight));
    }

    // Shift the log weights so exp() cannot overflow: the largest mass is
    // bounded by Double.MAX_VALUE / (2 * sampleCount).
    largestLogWeight -= Math.log(Double.MAX_VALUE / sampleCount / 2.0);

    final DataDistribution<ParameterType> result =
        new DefaultDataDistribution<ParameterType>(sampleCount);
    for (final DefaultWeightedValue<ParameterType> proposal : proposals)
    {
        result.increment(proposal.getValue(),
            Math.exp(proposal.getWeight() - largestLogWeight));
    }
    return result;
}
// NOTE(review): fragment — the statement that consumes the first expression's
// result (presumably `prototype = ...`) is cut off above; as written the lookup
// result is discarded. The remainder scales the input by the category's weight
// over differenceSum and subtracts it from the prototype's weights
// (an error-correction step; prototype moves away from the scaled input).
target.getPrototypes().get(category.getValue()); final double errorWeight = category.getWeight() / differenceSum; prototype.getWeights().minusEquals(input.scale(errorWeight));
// Fragment: store the updated particle with its weight normalized by the total
// (weightSum), presumably so the particle weights sum to 1 — weightSum and
// weight come from outside this view; confirm weightSum is nonzero here.
particles.set( updatedParticle.getValue(), weight/weightSum );
// Fragment: store the updated particle with its weight normalized by the total
// (weightSum), presumably so the particle weights sum to 1 — weightSum and
// weight come from outside this view; confirm weightSum is nonzero here.
particles.set( updatedParticle.getValue(), weight/weightSum );