/** * Gets the alpha (weight) value for the given training example index. Note * that alpha values are stored as weights that incorporate the label as the * sign of the weight. That is, weight = y * alpha where y is either +1 or * -1. * * @param i * The training example index. Must be between 0 and dataSize - 1. * @return * The current alpha value for i. */ private double getAlpha( final int i) { final DefaultWeightedValue<InputType> support = this.supportsMap.get(i); if (support == null) { // Not a support, so the alpha value is zero. return 0.0; } else { // The weight is the label (+1 or -1) times alpha. Alpha is always // greater than zero, so we just take the absolute value of the // weight to get it. return Math.abs(support.getWeight()); } }
/** * Gets the alpha (weight) value for the given training example index. Note * that alpha values are stored as weights that incorporate the label as the * sign of the weight. That is, weight = y * alpha where y is either +1 or * -1. * * @param i * The training example index. Must be between 0 and dataSize - 1. * @return * The current alpha value for i. */ private double getAlpha( final int i) { final DefaultWeightedValue<InputType> support = this.supportsMap.get(i); if (support == null) { // Not a support, so the alpha value is zero. return 0.0; } else { // The weight is the label (+1 or -1) times alpha. Alpha is always // greater than zero, so we just take the absolute value of the // weight to get it. return Math.abs(support.getWeight()); } }
/**
 * Evaluates the SVM decision function for the training example at the
 * given index, using the current support set and bias.
 *
 * @param i
 *      The training example index. Must be between 0 and dataSize - 1.
 * @return
 *      The output of the SVM for that example.
 */
private double getSVMOutput(
    final int i)
{
    // Start from the bias and accumulate the weighted kernel products
    // against every current support vector.
    double output = this.result.getBias();
    for (final Map.Entry<Integer, DefaultWeightedValue<InputType>> support
        : this.supportsMap.entrySet())
    {
        final double weight = support.getValue().getWeight();
        output += weight * this.evaluateKernel(i, support.getKey());
    }
    return output;
}
/**
 * Evaluates the SVM decision function for the training example at the
 * given index, using the current support set and bias.
 *
 * @param i
 *      The training example index. Must be between 0 and dataSize - 1.
 * @return
 *      The output of the SVM for that example.
 */
private double getSVMOutput(
    final int i)
{
    // Start from the bias and accumulate the weighted kernel products
    // against every current support vector.
    double output = this.result.getBias();
    for (final Map.Entry<Integer, DefaultWeightedValue<InputType>> support
        : this.supportsMap.entrySet())
    {
        final double weight = support.getValue().getWeight();
        output += weight * this.evaluateKernel(i, support.getKey());
    }
    return output;
}
/**
 * Evaluates the SVM decision function for the training example at the
 * given index, using the current support set and bias.
 *
 * @param i
 *      The training example index. Must be between 0 and dataSize - 1.
 * @return
 *      The output of the SVM for that example.
 */
private double getSVMOutput(
    final int i)
{
    // Start from the bias and accumulate the weighted kernel products
    // against every current support vector.
    double output = this.result.getBias();
    for (final Map.Entry<Integer, DefaultWeightedValue<InputType>> support
        : this.supportsMap.entrySet())
    {
        final double weight = support.getValue().getWeight();
        output += weight * this.evaluateKernel(i, support.getKey());
    }
    return output;
}
/**
 * Multiplies every support weight in the given kernel binary categorizer
 * by the supplied scale factor, in place.
 *
 * @param target
 *      The kernel binary categorizer whose weights are updated.
 * @param scale
 *      The scale factor applied to each weight.
 */
public static void scaleEquals(
    final DefaultKernelBinaryCategorizer<?> target,
    final double scale)
{
    for (final DefaultWeightedValue<?> support : target.getExamples())
    {
        support.setWeight(scale * support.getWeight());
    }
}
}
/**
 * Multiplies every support weight in the given kernel binary categorizer
 * by the supplied scale factor, in place.
 *
 * @param target
 *      The kernel binary categorizer whose weights are updated.
 * @param scale
 *      The scale factor applied to each weight.
 */
public static void scaleEquals(
    final DefaultKernelBinaryCategorizer<?> target,
    final double scale)
{
    for (final DefaultWeightedValue<?> support : target.getExamples())
    {
        support.setWeight(scale * support.getWeight());
    }
}
}
/**
 * Multiplies every support weight in the given kernel binary categorizer
 * by the supplied scale factor, in place.
 *
 * @param target
 *      The kernel binary categorizer whose weights are updated.
 * @param scale
 *      The scale factor applied to each weight.
 */
public static void scaleEquals(
    final DefaultKernelBinaryCategorizer<?> target,
    final double scale)
{
    for (final DefaultWeightedValue<?> support : target.getExamples())
    {
        support.setWeight(scale * support.getWeight());
    }
}
}
/**
 * Computes the squared 2-norm of the weight vector implied by the given
 * kernel binary categorizer. Useful for bypassing the square-root step
 * of a plain 2-norm computation.
 *
 * @param <InputType>
 *      The type of input to the categorizer value.
 * @param target
 *      A kernel binary categorizer.
 * @return
 *      The squared 2-norm of the categorizer according to the kernel.
 */
public static <InputType> double norm2Squared(
    final DefaultKernelBinaryCategorizer<InputType> target)
{
    final double bias = target.getBias();
    double normSquared = 0.0;
    for (final DefaultWeightedValue<InputType> support : target.getExamples())
    {
        // Remove the bias so only the kernel expansion contributes, then
        // multiply by this support's weight to accumulate w . w.
        final double dot = target.evaluateAsDouble(support.getValue()) - bias;
        normSquared += dot * support.getWeight();
    }
    return normSquared;
}
/**
 * Computes the squared 2-norm of the weight vector implied by the given
 * kernel binary categorizer. Useful for bypassing the square-root step
 * of a plain 2-norm computation.
 *
 * @param <InputType>
 *      The type of input to the categorizer value.
 * @param target
 *      A kernel binary categorizer.
 * @return
 *      The squared 2-norm of the categorizer according to the kernel.
 */
public static <InputType> double norm2Squared(
    final DefaultKernelBinaryCategorizer<InputType> target)
{
    final double bias = target.getBias();
    double normSquared = 0.0;
    for (final DefaultWeightedValue<InputType> support : target.getExamples())
    {
        // Remove the bias so only the kernel expansion contributes, then
        // multiply by this support's weight to accumulate w . w.
        final double dot = target.evaluateAsDouble(support.getValue()) - bias;
        normSquared += dot * support.getWeight();
    }
    return normSquared;
}
/**
 * Computes the squared 2-norm of the weight vector implied by the given
 * kernel binary categorizer. Useful for bypassing the square-root step
 * of a plain 2-norm computation.
 *
 * @param <InputType>
 *      The type of input to the categorizer value.
 * @param target
 *      A kernel binary categorizer.
 * @return
 *      The squared 2-norm of the categorizer according to the kernel.
 */
public static <InputType> double norm2Squared(
    final DefaultKernelBinaryCategorizer<InputType> target)
{
    final double bias = target.getBias();
    double normSquared = 0.0;
    for (final DefaultWeightedValue<InputType> support : target.getExamples())
    {
        // Remove the bias so only the kernel expansion contributes, then
        // multiply by this support's weight to accumulate w . w.
        final double dot = target.evaluateAsDouble(support.getValue()) - bias;
        normSquared += dot * support.getWeight();
    }
    return normSquared;
}
@Override public void update( final DefaultKernelBinaryCategorizer<InputType> target, final InputType input, final boolean output) { OnlineKernelPerceptron.update(target, input, output, true); // Remove instances to recover the budget. int size = target.getExampleCount(); while (size > this.getBudget()) { final int randomIndex = this.getRandom().nextInt(size); final DefaultWeightedValue<InputType> entry = target.remove(randomIndex); target.setBias(target.getBias() - entry.getWeight()); size--; } }
@Override public void update( final DefaultKernelBinaryCategorizer<InputType> target, final InputType input, final boolean output) { OnlineKernelPerceptron.update(target, input, output, true); // Remove instances to recover the budget. int size = target.getExampleCount(); while (size > this.getBudget()) { final int randomIndex = this.getRandom().nextInt(size); final DefaultWeightedValue<InputType> entry = target.remove(randomIndex); target.setBias(target.getBias() - entry.getWeight()); size--; } }
@Override public void update( final DefaultKernelBinaryCategorizer<InputType> target, final InputType input, final boolean label) { OnlineKernelPerceptron.update(target, input, label, true); // Remove old instances to recover the budget. while (target.getExampleCount() > this.getBudget()) { final DefaultWeightedValue<InputType> entry = target.remove(0); target.setBias(target.getBias() - entry.getWeight()); } }
@Override public void update( final DefaultKernelBinaryCategorizer<InputType> target, final InputType input, final boolean label) { OnlineKernelPerceptron.update(target, input, label, true); // Remove old instances to recover the budget. while (target.getExampleCount() > this.getBudget()) { final DefaultWeightedValue<InputType> entry = target.remove(0); target.setBias(target.getBias() - entry.getWeight()); } }
@Override public void update( final DefaultKernelBinaryCategorizer<InputType> target, final InputType input, final boolean output) { OnlineKernelPerceptron.update(target, input, output, true); // Remove instances to recover the budget. int size = target.getExampleCount(); while (size > this.getBudget()) { final int randomIndex = this.getRandom().nextInt(size); final DefaultWeightedValue<InputType> entry = target.remove(randomIndex); target.setBias(target.getBias() - entry.getWeight()); size--; } }
@Override public void update( final DefaultKernelBinaryCategorizer<InputType> target, final InputType input, final boolean label) { OnlineKernelPerceptron.update(target, input, label, true); // Remove old instances to recover the budget. while (target.getExampleCount() > this.getBudget()) { final DefaultWeightedValue<InputType> entry = target.remove(0); target.setBias(target.getBias() - entry.getWeight()); } }
/**
 * Learns a distribution over parameters via importance sampling: draws
 * proposals from the updater, scores each with its log-likelihood minus
 * its log importance density, then converts the log weights into
 * overflow-safe masses in a data distribution.
 *
 * @param data
 *      The observed data to compute log-likelihoods against.
 * @return
 *      A data distribution whose masses are proportional to the
 *      importance weights of the sampled parameters.
 */
@Override
public DataDistribution<ParameterType> learn(
    final Collection<? extends ObservationType> data)
{
    final int sampleCount = this.getNumSamples();

    // Draw proposals and record their log importance weights, tracking
    // the maximum for later normalization.
    final ArrayList<DefaultWeightedValue<ParameterType>> samples =
        new ArrayList<DefaultWeightedValue<ParameterType>>(sampleCount);
    double maxLogWeight = Double.NEGATIVE_INFINITY;
    for (int n = 0; n < sampleCount; n++)
    {
        final ParameterType parameter =
            this.getUpdater().makeProposal(random);
        // Importance weight in log space: log p(data | theta) - log q(theta).
        final double logWeight =
            this.getUpdater().computeLogLikelihood(parameter, data)
                - this.getUpdater().computeLogImportanceValue(parameter);
        if (logWeight > maxLogWeight)
        {
            maxLogWeight = logWeight;
        }
        samples.add(new DefaultWeightedValue<ParameterType>(
            parameter, logWeight));
    }

    // Shift the log weights so exponentiation cannot overflow: the largest
    // resulting mass is bounded near Double.MAX_VALUE / (2 * sampleCount).
    maxLogWeight -= Math.log(Double.MAX_VALUE / sampleCount / 2.0);

    final DataDistribution<ParameterType> distribution =
        new DefaultDataDistribution<ParameterType>(sampleCount);
    for (final DefaultWeightedValue<ParameterType> sample : samples)
    {
        distribution.increment(sample.getValue(),
            Math.exp(sample.getWeight() - maxLogWeight));
    }
    return distribution;
}
/**
 * Learns a distribution over parameters via importance sampling: draws
 * proposals from the updater, scores each with its log-likelihood minus
 * its log importance density, then converts the log weights into
 * overflow-safe masses in a data distribution.
 *
 * @param data
 *      The observed data to compute log-likelihoods against.
 * @return
 *      A data distribution whose masses are proportional to the
 *      importance weights of the sampled parameters.
 */
@Override
public DataDistribution<ParameterType> learn(
    final Collection<? extends ObservationType> data)
{
    final int sampleCount = this.getNumSamples();

    // Draw proposals and record their log importance weights, tracking
    // the maximum for later normalization.
    final ArrayList<DefaultWeightedValue<ParameterType>> samples =
        new ArrayList<DefaultWeightedValue<ParameterType>>(sampleCount);
    double maxLogWeight = Double.NEGATIVE_INFINITY;
    for (int n = 0; n < sampleCount; n++)
    {
        final ParameterType parameter =
            this.getUpdater().makeProposal(random);
        // Importance weight in log space: log p(data | theta) - log q(theta).
        final double logWeight =
            this.getUpdater().computeLogLikelihood(parameter, data)
                - this.getUpdater().computeLogImportanceValue(parameter);
        if (logWeight > maxLogWeight)
        {
            maxLogWeight = logWeight;
        }
        samples.add(new DefaultWeightedValue<ParameterType>(
            parameter, logWeight));
    }

    // Shift the log weights so exponentiation cannot overflow: the largest
    // resulting mass is bounded near Double.MAX_VALUE / (2 * sampleCount).
    maxLogWeight -= Math.log(Double.MAX_VALUE / sampleCount / 2.0);

    final DataDistribution<ParameterType> distribution =
        new DefaultDataDistribution<ParameterType>(sampleCount);
    for (final DefaultWeightedValue<ParameterType> sample : samples)
    {
        distribution.increment(sample.getValue(),
            Math.exp(sample.getWeight() - maxLogWeight));
    }
    return distribution;
}
/**
 * Learns a distribution over parameters via importance sampling: draws
 * proposals from the updater, scores each with its log-likelihood minus
 * its log importance density, then converts the log weights into
 * overflow-safe masses in a data distribution.
 *
 * @param data
 *      The observed data to compute log-likelihoods against.
 * @return
 *      A data distribution whose masses are proportional to the
 *      importance weights of the sampled parameters.
 */
@Override
public DataDistribution<ParameterType> learn(
    final Collection<? extends ObservationType> data)
{
    final int sampleCount = this.getNumSamples();

    // Draw proposals and record their log importance weights, tracking
    // the maximum for later normalization.
    final ArrayList<DefaultWeightedValue<ParameterType>> samples =
        new ArrayList<DefaultWeightedValue<ParameterType>>(sampleCount);
    double maxLogWeight = Double.NEGATIVE_INFINITY;
    for (int n = 0; n < sampleCount; n++)
    {
        final ParameterType parameter =
            this.getUpdater().makeProposal(random);
        // Importance weight in log space: log p(data | theta) - log q(theta).
        final double logWeight =
            this.getUpdater().computeLogLikelihood(parameter, data)
                - this.getUpdater().computeLogImportanceValue(parameter);
        if (logWeight > maxLogWeight)
        {
            maxLogWeight = logWeight;
        }
        samples.add(new DefaultWeightedValue<ParameterType>(
            parameter, logWeight));
    }

    // Shift the log weights so exponentiation cannot overflow: the largest
    // resulting mass is bounded near Double.MAX_VALUE / (2 * sampleCount).
    maxLogWeight -= Math.log(Double.MAX_VALUE / sampleCount / 2.0);

    final DataDistribution<ParameterType> distribution =
        new DefaultDataDistribution<ParameterType>(sampleCount);
    for (final DefaultWeightedValue<ParameterType> sample : samples)
    {
        distribution.increment(sample.getValue(),
            Math.exp(sample.getWeight() - maxLogWeight));
    }
    return distribution;
}