/** Restores this learner to its untrained state, zeroing all learned weights and the bias. */
public void forget() {
    super.forget();
    // super.forget() rebuilds the weight vector, so the cleared bias and the
    // refreshed typed alias are both set afterwards.
    averagedBias = 0;
    awv = (AveragedWeightVector) weightVector;
}
/**
 * Applies the given parameter settings to this learning algorithm.
 *
 * @param p The parameters to apply.
 **/
public void setParameters(Parameters p) {
    super.setParameters(p);
    // Keep the typed alias in sync in case the superclass replaced weightVector.
    awv = (AveragedWeightVector) weightVector;
}
/**
 * Serializes this learner's internal representation in binary form.  The superclass's
 * state is written first, followed by the averaged bias term.
 *
 * @param out The output stream.
 **/
public void write(ExceptionlessOutputStream out) {
    super.write(out);
    out.writeDouble(averagedBias);
}
/**
 * Retrieves the parameters that are set in this learner.
 *
 * @return A new parameters object wrapping the superclass's current parameter values.
 **/
public Learner.Parameters getParameters() {
    return new Parameters((SparsePerceptron.Parameters) super.getParameters());
}
/**
 * Reads the binary representation of a learner with this object's run-time type,
 * overwriting any and all learned or manually specified parameters as well as the label
 * lexicon but without modifying the feature lexicon.  The superclass's state is restored
 * first, then {@code beta} (mirroring the order used by {@code write(ExceptionlessOutputStream)}).
 *
 * @param in The input stream.
 **/
public void read(ExceptionlessInputStream in) {
    super.read(in);
    beta = in.readDouble();
}
/**
 * Scales the feature vector produced by the extractor by the learning rate and subtracts
 * it from the weight vector.  A (nearly) zero rate is a no-op and skips the update.
 *
 * @param exampleFeatures The example's array of feature indices.
 * @param exampleValues   The example's array of feature values.
 * @param rate            The learning rate at which the weights are updated.
 **/
public void demote(int[] exampleFeatures, double[] exampleValues, double rate) {
    if (nearlyEqualTo(rate, 0.0)) return;
    super.demote(exampleFeatures, exampleValues, rate);
}
/**
 * Scales the feature vector produced by the extractor by the learning rate and adds it to
 * the weight vector.  A (nearly) zero rate is a no-op and skips the update.
 *
 * @param exampleFeatures The example's array of feature indices.
 * @param exampleValues   The example's array of feature values.
 * @param rate            The learning rate at which the weights are updated.
 **/
public void promote(int[] exampleFeatures, double[] exampleValues, double rate) {
    if (nearlyEqualTo(rate, 0.0)) return;
    super.promote(exampleFeatures, exampleValues, rate);
}
/**
 * Serializes this learner's internal representation in binary form.  The superclass's
 * state is written first, followed by {@code beta}.
 *
 * @param out The output stream.
 **/
public void write(ExceptionlessOutputStream out) {
    super.write(out);
    out.writeDouble(beta);
}
/**
 * Retrieves the parameters that are set in this learner.
 *
 * @return An object containing all the values of the parameters that control the behavior
 *         of this learning algorithm, including {@code beta}.
 **/
public Learner.Parameters getParameters() {
    Parameters result = new Parameters((SparsePerceptron.Parameters) super.getParameters());
    result.beta = beta;
    return result;
}
/**
 * Reads the binary representation of a learner with this object's run-time type,
 * overwriting any and all learned or manually specified parameters as well as the label
 * lexicon but without modifying the feature lexicon.  The superclass's state is restored
 * first, then the averaged bias (mirroring {@code write(ExceptionlessOutputStream)}).
 *
 * @param in The input stream.
 **/
public void read(ExceptionlessInputStream in) {
    super.read(in);
    averagedBias = in.readDouble();
    // Refresh the typed alias after super.read(in) has rebuilt the weight vector.
    awv = (AveragedWeightVector) weightVector;
}
/**
 * Applies the given parameter settings to this learning algorithm.
 *
 * @param p The parameters to apply.
 **/
public void setParameters(Parameters p) {
    super.setParameters(p);
    beta = p.beta;
}