/**
 * Creates a new FletcherXuHybridEstimation with the given line minimizer,
 * reduction-test threshold, damping-factor divisor, iteration limit, and
 * stopping tolerance.
 *
 * @param lineMinimizer
 *      Workhorse algorithm that finds the minimum along a particular direction.
 * @param reductionTest
 *      Threshold in [0,1] for switching between BFGS and Levenberg-Marquardt
 *      steps; lower values favor Levenberg-Marquardt, higher values favor BFGS.
 * @param dampingFactorDivisor
 *      Amount by which the damping factor is modified, typically 2.0 or 10.0.
 * @param maxIterations
 *      Maximum number of iterations before stopping.
 * @param tolerance
 *      Tolerance of the algorithm.
 */
public FletcherXuHybridEstimation(
    final LineMinimizer<?> lineMinimizer,
    final double reductionTest,
    final double dampingFactorDivisor,
    final int maxIterations,
    final double tolerance )
{
    super( maxIterations, tolerance );
    this.setLineMinimizer( lineMinimizer );
    this.setReductionTest( reductionTest );
    this.setDampingFactorDivisor( dampingFactorDivisor );
}
// NOTE(review): this step() body appears to have been truncated/garbled during
// extraction — the method's opening brace, the head of the Cache.compute(...)
// assignment that should define "cache", the declarations of "delta", "gamma",
// and "direction", the Levenberg-Marquardt else-branch, and the return statement
// are all missing, leaving several orphaned argument lists. Tokens are preserved
// exactly as found; restore the full body from the original source before compiling.
@Override protected boolean step()
    // Line search along the current direction, seeded with the last cost value
    // and the slope J^T*e from the previous iteration's cache.
    InputOutputPair<Vector,Double> result = this.getLineMinimizer().minimizeAlongDirection( this.lineFunction, this.lastCost.parameterCost, this.lastCost.Jte );
    // Copy the line-search minimizer back into the result object's parameters.
    this.getResult().convertFromVector( result.getInput() );
    // Orphaned argument list — presumably the tail of
    // "SumSquaredErrorCostFunction.Cache cache = SumSquaredErrorCostFunction.Cache.compute( ... );" — TODO confirm against original source.
    this.getResult(), this.getData() );
    this.setResultCost( cache.parameterCost );
    // Fletcher-Xu reduction test: if the achieved cost reduction is at least
    // reductionTest * previous cost, take a BFGS (quasi-Newton) step and relax damping...
    if( this.getReductionTest()*this.lastCost.parameterCost <= (this.lastCost.parameterCost - cache.parameterCost) )
    this.dampingFactor /= this.getDampingFactorDivisor();
    // Orphaned tail — presumably "Vector gamma = cache.Jte.minus( lastCost.Jte );" (gradient difference) — TODO confirm.
    cache.Jte.minus( lastCost.Jte ) );
    // BFGS update of the inverse-Hessian estimate from the step (delta) and
    // gradient-difference (gamma) vectors.
    FunctionMinimizerBFGS.BFGSupdateRule( this.hessianInverse, delta, gamma, this.getTolerance() );
    this.lineFunction.setDirection( direction );
    // ...otherwise (failed reduction test) tighten Levenberg-Marquardt damping.
    this.dampingFactor *= this.getDampingFactorDivisor();
    // Orphaned argument list — likely the tail of the line function / anchor-point
    // update for the next iteration — TODO confirm against original source.
    result.getInput(), result.getOutput(), delta, cache.Jte, this.getTolerance() );
@Override protected boolean initializeAlgorithm() { this.setResult( this.getObjectToOptimize().clone() ); this.getCostFunction().setCostParameters( this.getData() ); this.dampingFactor = 1.0; this.lastCost = SumSquaredErrorCostFunction.Cache.compute( this.getResult(), this.getData() ); ParameterDifferentiableCostMinimizer.ParameterCostEvaluatorDerivativeBased f = new ParameterDifferentiableCostMinimizer.ParameterCostEvaluatorDerivativeBased( this.getResult(), this.getCostFunction() ); // Load up the line function with the current direction and // the search direction, which is the negative gradient, in other words // the direction of steepest descent Vector parameters = this.getResult().convertToVector(); int M = parameters.getDimensionality(); this.lineFunction = new DirectionalVectorToDifferentiableScalarFunction( f, parameters, this.lastCost.Jte ); this.hessianInverse = MatrixFactory.getDefault().createIdentity( M, M ).scale( 0.5 ); return true; }
// NOTE(review): this step() body appears to have been truncated/garbled during
// extraction — the method's opening brace, the head of the Cache.compute(...)
// assignment that should define "cache", the declarations of "delta", "gamma",
// and "direction", the Levenberg-Marquardt else-branch, and the return statement
// are all missing, leaving several orphaned argument lists. Tokens are preserved
// exactly as found; restore the full body from the original source before compiling.
@Override protected boolean step()
    // Line search along the current direction, seeded with the last cost value
    // and the slope J^T*e from the previous iteration's cache.
    InputOutputPair<Vector,Double> result = this.getLineMinimizer().minimizeAlongDirection( this.lineFunction, this.lastCost.parameterCost, this.lastCost.Jte );
    // Copy the line-search minimizer back into the result object's parameters.
    this.getResult().convertFromVector( result.getInput() );
    // Orphaned argument list — presumably the tail of
    // "SumSquaredErrorCostFunction.Cache cache = SumSquaredErrorCostFunction.Cache.compute( ... );" — TODO confirm against original source.
    this.getResult(), this.getData() );
    this.setResultCost( cache.parameterCost );
    // Fletcher-Xu reduction test: if the achieved cost reduction is at least
    // reductionTest * previous cost, take a BFGS (quasi-Newton) step and relax damping...
    if( this.getReductionTest()*this.lastCost.parameterCost <= (this.lastCost.parameterCost - cache.parameterCost) )
    this.dampingFactor /= this.getDampingFactorDivisor();
    // Orphaned tail — presumably "Vector gamma = cache.Jte.minus( lastCost.Jte );" (gradient difference) — TODO confirm.
    cache.Jte.minus( lastCost.Jte ) );
    // BFGS update of the inverse-Hessian estimate from the step (delta) and
    // gradient-difference (gamma) vectors.
    FunctionMinimizerBFGS.BFGSupdateRule( this.hessianInverse, delta, gamma, this.getTolerance() );
    this.lineFunction.setDirection( direction );
    // ...otherwise (failed reduction test) tighten Levenberg-Marquardt damping.
    this.dampingFactor *= this.getDampingFactorDivisor();
    // Orphaned argument list — likely the tail of the line function / anchor-point
    // update for the next iteration — TODO confirm against original source.
    result.getInput(), result.getOutput(), delta, cache.Jte, this.getTolerance() );
@Override protected boolean initializeAlgorithm() { this.setResult( this.getObjectToOptimize().clone() ); this.getCostFunction().setCostParameters( this.getData() ); this.dampingFactor = 1.0; this.lastCost = SumSquaredErrorCostFunction.Cache.compute( this.getResult(), this.getData() ); ParameterDifferentiableCostMinimizer.ParameterCostEvaluatorDerivativeBased f = new ParameterDifferentiableCostMinimizer.ParameterCostEvaluatorDerivativeBased( this.getResult(), this.getCostFunction() ); // Load up the line function with the current direction and // the search direction, which is the negative gradient, in other words // the direction of steepest descent Vector parameters = this.getResult().convertToVector(); int M = parameters.getDimensionality(); this.lineFunction = new DirectionalVectorToDifferentiableScalarFunction( f, parameters, this.lastCost.Jte ); this.hessianInverse = MatrixFactory.getDefault().createIdentity( M, M ).scale( 0.5 ); return true; }
// NOTE(review): this step() body appears to have been truncated/garbled during
// extraction — the method's opening brace, the head of the Cache.compute(...)
// assignment that should define "cache", the declarations of "delta", "gamma",
// and "direction", the Levenberg-Marquardt else-branch, and the return statement
// are all missing, leaving several orphaned argument lists. Tokens are preserved
// exactly as found; restore the full body from the original source before compiling.
@Override protected boolean step()
    // Line search along the current direction, seeded with the last cost value
    // and the slope J^T*e from the previous iteration's cache.
    InputOutputPair<Vector,Double> result = this.getLineMinimizer().minimizeAlongDirection( this.lineFunction, this.lastCost.parameterCost, this.lastCost.Jte );
    // Copy the line-search minimizer back into the result object's parameters.
    this.getResult().convertFromVector( result.getInput() );
    // Orphaned argument list — presumably the tail of
    // "SumSquaredErrorCostFunction.Cache cache = SumSquaredErrorCostFunction.Cache.compute( ... );" — TODO confirm against original source.
    this.getResult(), this.getData() );
    this.setResultCost( cache.parameterCost );
    // Fletcher-Xu reduction test: if the achieved cost reduction is at least
    // reductionTest * previous cost, take a BFGS (quasi-Newton) step and relax damping...
    if( this.getReductionTest()*this.lastCost.parameterCost <= (this.lastCost.parameterCost - cache.parameterCost) )
    this.dampingFactor /= this.getDampingFactorDivisor();
    // Orphaned tail — presumably "Vector gamma = cache.Jte.minus( lastCost.Jte );" (gradient difference) — TODO confirm.
    cache.Jte.minus( lastCost.Jte ) );
    // BFGS update of the inverse-Hessian estimate from the step (delta) and
    // gradient-difference (gamma) vectors.
    FunctionMinimizerBFGS.BFGSupdateRule( this.hessianInverse, delta, gamma, this.getTolerance() );
    this.lineFunction.setDirection( direction );
    // ...otherwise (failed reduction test) tighten Levenberg-Marquardt damping.
    this.dampingFactor *= this.getDampingFactorDivisor();
    // Orphaned argument list — likely the tail of the line function / anchor-point
    // update for the next iteration — TODO confirm against original source.
    result.getInput(), result.getOutput(), delta, cache.Jte, this.getTolerance() );
@Override protected boolean initializeAlgorithm() { this.setResult( this.getObjectToOptimize().clone() ); this.getCostFunction().setCostParameters( this.getData() ); this.dampingFactor = 1.0; this.lastCost = SumSquaredErrorCostFunction.Cache.compute( this.getResult(), this.getData() ); ParameterDifferentiableCostMinimizer.ParameterCostEvaluatorDerivativeBased f = new ParameterDifferentiableCostMinimizer.ParameterCostEvaluatorDerivativeBased( this.getResult(), this.getCostFunction() ); // Load up the line function with the current direction and // the search direction, which is the negative gradient, in other words // the direction of steepest descent Vector parameters = this.getResult().convertToVector(); int M = parameters.getDimensionality(); this.lineFunction = new DirectionalVectorToDifferentiableScalarFunction( f, parameters, this.lastCost.Jte ); this.hessianInverse = MatrixFactory.getDefault().createIdentity( M, M ).scale( 0.5 ); return true; }
/**
 * Creates a new FletcherXuHybridEstimation with the given line minimizer,
 * reduction-test threshold, damping-factor divisor, iteration limit, and
 * stopping tolerance.
 *
 * @param lineMinimizer
 *      Workhorse algorithm that finds the minimum along a particular direction.
 * @param reductionTest
 *      Threshold in [0,1] for switching between BFGS and Levenberg-Marquardt
 *      steps; lower values favor Levenberg-Marquardt, higher values favor BFGS.
 * @param dampingFactorDivisor
 *      Amount by which the damping factor is modified, typically 2.0 or 10.0.
 * @param maxIterations
 *      Maximum number of iterations before stopping.
 * @param tolerance
 *      Tolerance of the algorithm.
 */
public FletcherXuHybridEstimation(
    final LineMinimizer<?> lineMinimizer,
    final double reductionTest,
    final double dampingFactorDivisor,
    final int maxIterations,
    final double tolerance )
{
    super( maxIterations, tolerance );
    this.setLineMinimizer( lineMinimizer );
    this.setReductionTest( reductionTest );
    this.setDampingFactorDivisor( dampingFactorDivisor );
}
/**
 * Creates a new FletcherXuHybridEstimation with the given line minimizer,
 * reduction-test threshold, damping-factor divisor, iteration limit, and
 * stopping tolerance.
 *
 * @param lineMinimizer
 *      Workhorse algorithm that finds the minimum along a particular direction.
 * @param reductionTest
 *      Threshold in [0,1] for switching between BFGS and Levenberg-Marquardt
 *      steps; lower values favor Levenberg-Marquardt, higher values favor BFGS.
 * @param dampingFactorDivisor
 *      Amount by which the damping factor is modified, typically 2.0 or 10.0.
 * @param maxIterations
 *      Maximum number of iterations before stopping.
 * @param tolerance
 *      Tolerance of the algorithm.
 */
public FletcherXuHybridEstimation(
    final LineMinimizer<?> lineMinimizer,
    final double reductionTest,
    final double dampingFactorDivisor,
    final int maxIterations,
    final double tolerance )
{
    super( maxIterations, tolerance );
    this.setLineMinimizer( lineMinimizer );
    this.setReductionTest( reductionTest );
    this.setDampingFactorDivisor( dampingFactorDivisor );
}