/**
 * Applies a packed parameter vector produced by the optimizer to the wrapped learner.
 * Only the first two slots are tuned: {@code params[0]} = lambda, {@code params[1]} = learning rate.
 *
 * @param params packed hyper-parameter vector; must have at least two elements
 */
@Override
public void update(double[] params) {
  wrapped.lambda(params[0]);
  wrapped.learningRate(params[1]);
  // The annealing controls are pinned to fixed values and deliberately not optimized.
  wrapped.stepOffset(1);
  wrapped.alpha(1);
  wrapped.decayExponent(0);
}
/**
 * Returns a copy of this learner with the same dimensions and state.
 *
 * <p>Note the side effect: this instance is closed (finalized) before copying.</p>
 *
 * @return a new {@code PassiveAggressive} populated from this one
 */
public PassiveAggressive copy() {
  close();
  PassiveAggressive duplicate = new PassiveAggressive(numCategories(), numFeatures());
  duplicate.copyFrom(this);
  return duplicate;
}
/**
 * Sets the averaging window on the seed learner and rebuilds the optimizer pool
 * so the new window size takes effect across all pooled learners.
 *
 * @param averagingWindow number of examples over which results are averaged
 */
public void setAveragingWindow(int averagingWindow) {
  seed.getPayload().getLearner().setWindowSize(averagingWindow);
  setupOptimizer(poolSize);
}
@Override public void train(long trackingKey, String groupKey, int actual, Vector instance) { record++; buffer.add(new TrainingExample(trackingKey, groupKey, actual, instance)); //don't train until we have enough examples if (buffer.size() > bufferSize) { trainWithBufferedExamples(); } }
/**
 * Runs the two-stage feed-forward pass (input -&gt; hidden -&gt; output)
 * without applying the link function to the result.
 *
 * @param instance the input feature vector
 * @return the raw output-layer vector
 */
@Override
public Vector classifyNoLink(Vector instance) {
  return hiddenToOutput(inputToHidden(instance));
}
/**
 * Fans the decay-exponent setting out to every fold's model.
 *
 * @param x the decay exponent to apply
 * @return this learner, for fluent chaining
 */
public CrossFoldLearner decayExponent(double x) {
  for (OnlineLogisticRegression fold : models) {
    fold.decayExponent(x);
  }
  return this;
}
/**
 * Sets how often the evolutionary optimization of learning parameters occurs,
 * using the same value for both interval bounds (delegates to the two-argument form).
 *
 * @param interval number of training examples to use in each epoch of optimization
 */
public void setInterval(int interval) {
  setInterval(interval, interval);
}
@Override public OnlineLogisticRegression lambda(double lambda) { // we only over-ride this to provide a more restrictive return type super.lambda(lambda); return this; }
/**
 * Copies all learner state from another instance: the base-class state first,
 * then this class's annealing/regularization parameters.
 *
 * @param other the learner to copy from
 */
public void copyFrom(OnlineLogisticRegression other) {
  super.copyFrom(other);
  // Annealing schedule parameters.
  stepOffset = other.stepOffset;
  forgettingExponent = other.forgettingExponent;
  perTermAnnealingOffset = other.perTermAnnealingOffset;
  // Learning-rate and decay parameters.
  mu0 = other.mu0;
  decayFactor = other.decayFactor;
}
/**
 * Finalizes the model: advances the step counter, applies any pending
 * regularization to all terms, and seals the instance. Idempotent —
 * subsequent calls are no-ops once sealed.
 */
@Override
public void close() {
  if (sealed) {
    return;
  }
  step++;
  regularizeAll();
  sealed = true;
}
/**
 * Consumes a packed hyper-parameter vector in order: [lambda, learningRate].
 * The remaining knobs on the wrapped learner are held constant rather than tuned.
 *
 * @param params packed hyper-parameter vector; must have at least two elements
 */
@Override
public void update(double[] params) {
  int cursor = 0;
  wrapped.lambda(params[cursor]);
  cursor++;
  wrapped.learningRate(params[cursor]);
  // Fixed (non-optimized) annealing settings.
  wrapped.stepOffset(1);
  wrapped.alpha(1);
  wrapped.decayExponent(0);
}
/**
 * Produces a copy of this learner. Closing happens first, so this instance
 * is finalized as a side effect of copying.
 *
 * @return a fresh {@code PassiveAggressive} with this learner's state
 */
public PassiveAggressive copy() {
  close();
  PassiveAggressive result = new PassiveAggressive(numCategories(), numFeatures());
  result.copyFrom(this);
  return result;
}