/**
 * Initializes the search: clones the object to optimize so the caller's
 * instance is untouched, resets the no-improvement counter, and records the
 * initial parameters and their cost as the best-so-far solution.
 *
 * @return true, since initialization here always succeeds
 */
@Override
protected boolean initializeAlgorithm()
{
    // Operate on a private clone so the original object to optimize is not mutated.
    this.setResult( this.getObjectToOptimize().clone() );
    this.setIterationsWithoutImprovement(0);

    // Snapshot the starting parameters as the best solution found so far.
    this.bestParameters = this.getResult().convertToVector();

    // Evaluate the cost of the starting parameters.
    this.bestParametersCost = SumSquaredErrorCostFunction.Cache.compute(
        this.getResult(), this.getData() );
    this.setResultCost( this.bestParametersCost.parameterCost );
    return true;
}
/**
 * Creates a new instance of LevenbergMarquardtEstimation
 *
 * @param dampingFactor
 * Current damping factor for the ridge regression
 * @param dampingFactorDivisor
 * Divisor of the damping factor on a successful iteration, must be greater
 * than 1.0, typically ~10.0
 * @param maxIterations
 * Maximum iterations before stopping
 * @param maxIterationsWithoutImprovement
 * Number of sequential unsuccessful iterations without a cost-reducing step
 * @param tolerance
 * Stopping criterion for the algorithm, typically ~1e-5
 */
public LevenbergMarquardtEstimation(
    double dampingFactor,
    double dampingFactorDivisor,
    int maxIterations,
    int maxIterationsWithoutImprovement,
    double tolerance )
{
    super( maxIterations, tolerance );
    this.setDampingFactor(dampingFactor);
    this.setDampingFactorDivisor(dampingFactorDivisor);
    this.setMaxIterationsWithoutImprovement(maxIterationsWithoutImprovement);
}
// Levenberg-Marquardt ridge step: ADD the damping factor to the diagonal of
// J^T*J so we solve (J^T*J + lambda*I) * delta = J^T*e. The previous code
// SUBTRACTED it, which de-regularizes the system and can make the matrix
// indefinite/singular instead of better conditioned.
JtJpI.setElement(i, i, v + this.getDampingFactor() );
this.getResult().convertFromVector( trialParameters );
// Evaluate the cost of the trial parameters and publish it as the current result cost.
SumSquaredErrorCostFunction.Cache trialCost =
    SumSquaredErrorCostFunction.Cache.compute( this.getResult(), this.getData() );
this.bestParametersCost = trialCost;
this.setResultCost( this.bestParametersCost.parameterCost );
// Convergence test: stop once the change falls below the tolerance.
if( delta < this.getTolerance() )
// Give up after too many sequential iterations without a cost-reducing step.
if( this.iterationsWithoutImprovement > this.getMaxIterationsWithoutImprovement() )
/**
 * Uploads the best parameters found back into the result function.
 */
@Override
protected void cleanupAlgorithm()
{
    // Make sure we've uploaded the best parameters into the function.
    // We won't necessarily have the best parameters uploaded if we
    // didn't finish on the "delta" convergence condition.
    this.getResult().convertFromVector( this.bestParameters );
}
// Levenberg-Marquardt ridge step: ADD the damping factor to the diagonal of
// J^T*J so we solve (J^T*J + lambda*I) * delta = J^T*e. The previous code
// SUBTRACTED it, which de-regularizes the system and can make the matrix
// indefinite/singular instead of better conditioned.
JtJpI.setElement(i, i, v + this.getDampingFactor() );
this.getResult().convertFromVector( trialParameters );
// Evaluate the cost of the trial parameters and publish it as the current result cost.
SumSquaredErrorCostFunction.Cache trialCost =
    SumSquaredErrorCostFunction.Cache.compute( this.getResult(), this.getData() );
this.bestParametersCost = trialCost;
this.setResultCost( this.bestParametersCost.parameterCost );
// Convergence test: stop once the change falls below the tolerance.
if( delta < this.getTolerance() )
// Give up after too many sequential iterations without a cost-reducing step.
if( this.iterationsWithoutImprovement > this.getMaxIterationsWithoutImprovement() )
/**
 * Uploads the best parameters found back into the result function.
 */
@Override
protected void cleanupAlgorithm()
{
    // Make sure we've uploaded the best parameters into the function.
    // We won't necessarily have the best parameters uploaded if we
    // didn't finish on the "delta" convergence condition.
    this.getResult().convertFromVector( this.bestParameters );
}
/**
 * Initializes the search: clones the object to optimize so the caller's
 * instance is untouched, resets the no-improvement counter, and records the
 * initial parameters and their cost as the best-so-far solution.
 *
 * @return true, since initialization here always succeeds
 */
@Override
protected boolean initializeAlgorithm()
{
    // Operate on a private clone so the original object to optimize is not mutated.
    this.setResult( this.getObjectToOptimize().clone() );
    this.setIterationsWithoutImprovement(0);

    // Snapshot the starting parameters as the best solution found so far.
    this.bestParameters = this.getResult().convertToVector();

    // Evaluate the cost of the starting parameters.
    this.bestParametersCost = SumSquaredErrorCostFunction.Cache.compute(
        this.getResult(), this.getData() );
    this.setResultCost( this.bestParametersCost.parameterCost );
    return true;
}
// Levenberg-Marquardt ridge step: ADD the damping factor to the diagonal of
// J^T*J so we solve (J^T*J + lambda*I) * delta = J^T*e. The previous code
// SUBTRACTED it, which de-regularizes the system and can make the matrix
// indefinite/singular instead of better conditioned.
JtJpI.setElement(i, i, v + this.getDampingFactor() );
this.getResult().convertFromVector( trialParameters );
// Evaluate the cost of the trial parameters and publish it as the current result cost.
SumSquaredErrorCostFunction.Cache trialCost =
    SumSquaredErrorCostFunction.Cache.compute( this.getResult(), this.getData() );
this.bestParametersCost = trialCost;
this.setResultCost( this.bestParametersCost.parameterCost );
// Convergence test: stop once the change falls below the tolerance.
if( delta < this.getTolerance() )
// Give up after too many sequential iterations without a cost-reducing step.
if( this.iterationsWithoutImprovement > this.getMaxIterationsWithoutImprovement() )
/**
 * Creates a new instance of LevenbergMarquardtEstimation
 *
 * @param dampingFactor
 * Current damping factor for the ridge regression
 * @param dampingFactorDivisor
 * Divisor of the damping factor on a successful iteration, must be greater
 * than 1.0, typically ~10.0
 * @param maxIterations
 * Maximum iterations before stopping
 * @param maxIterationsWithoutImprovement
 * Number of sequential unsuccessful iterations without a cost-reducing step
 * @param tolerance
 * Stopping criterion for the algorithm, typically ~1e-5
 */
public LevenbergMarquardtEstimation(
    double dampingFactor,
    double dampingFactorDivisor,
    int maxIterations,
    int maxIterationsWithoutImprovement,
    double tolerance )
{
    super( maxIterations, tolerance );
    this.setDampingFactor(dampingFactor);
    this.setDampingFactorDivisor(dampingFactorDivisor);
    this.setMaxIterationsWithoutImprovement(maxIterationsWithoutImprovement);
}
/**
 * Uploads the best parameters found back into the result function.
 */
@Override
protected void cleanupAlgorithm()
{
    // Make sure we've uploaded the best parameters into the function.
    // We won't necessarily have the best parameters uploaded if we
    // didn't finish on the "delta" convergence condition.
    this.getResult().convertFromVector( this.bestParameters );
}
/**
 * Initializes the search: clones the object to optimize so the caller's
 * instance is untouched, resets the no-improvement counter, and records the
 * initial parameters and their cost as the best-so-far solution.
 *
 * @return true, since initialization here always succeeds
 */
@Override
protected boolean initializeAlgorithm()
{
    // Operate on a private clone so the original object to optimize is not mutated.
    this.setResult( this.getObjectToOptimize().clone() );
    this.setIterationsWithoutImprovement(0);

    // Snapshot the starting parameters as the best solution found so far.
    this.bestParameters = this.getResult().convertToVector();

    // Evaluate the cost of the starting parameters.
    this.bestParametersCost = SumSquaredErrorCostFunction.Cache.compute(
        this.getResult(), this.getData() );
    this.setResultCost( this.bestParametersCost.parameterCost );
    return true;
}
/**
 * Creates a new instance of LevenbergMarquardtEstimation
 *
 * @param dampingFactor
 * Current damping factor for the ridge regression
 * @param dampingFactorDivisor
 * Divisor of the damping factor on a successful iteration, must be greater
 * than 1.0, typically ~10.0
 * @param maxIterations
 * Maximum iterations before stopping
 * @param maxIterationsWithoutImprovement
 * Number of sequential unsuccessful iterations without a cost-reducing step
 * @param tolerance
 * Stopping criterion for the algorithm, typically ~1e-5
 */
public LevenbergMarquardtEstimation(
    double dampingFactor,
    double dampingFactorDivisor,
    int maxIterations,
    int maxIterationsWithoutImprovement,
    double tolerance )
{
    super( maxIterations, tolerance );
    this.setDampingFactor(dampingFactor);
    this.setDampingFactorDivisor(dampingFactorDivisor);
    this.setMaxIterationsWithoutImprovement(maxIterationsWithoutImprovement);
}